xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 075987266ca0d0e8d09d008387b2c5b96849a557)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry registered (once) when the Chaco partitioner is used.
   Note: every line of the entry must end in \n; the booktitle line was missing it,
   which produced a malformed citation (isbn fused onto the booktitle line). */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry registered (once) when the ParMetis partitioner is used */
const char ParMetisPartitionerCitation[] =
  "@article{KarypisKumar98,\n"
  "  author  = {George Karypis and Vipin Kumar},\n"
  "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
  "  journal = {Journal of Parallel and Distributed Computing},\n"
  "  volume  = {48},\n" "  pages   = {71--85},\n"
  "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
35 + dm      - The mesh DM dm
36 - height  - Height of the strata from which to construct the graph
37 
38   Output Parameter:
39 + numVertices     - Number of vertices in the graph
40 . offsets         - Point offsets in the graph
41 . adjacency       - Point connectivity in the graph
42 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
43 
44   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
45   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
46   representation on the mesh.
47 
48   Level: developer
49 
50 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
51 @*/
52 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
53 {
54   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
55   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
56   IS             cellNumbering;
57   const PetscInt *cellNum;
58   PetscBool      useCone, useClosure;
59   PetscSection   section;
60   PetscSegBuffer adjBuffer;
61   PetscSF        sfPoint;
62   PetscMPIInt    rank;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
67   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
68   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
69   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
70   /* Build adjacency graph via a section/segbuffer */
71   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
72   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
73   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
74   /* Always use FVM adjacency to create partitioner graph */
75   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
76   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
78   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
79   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
80   if (globalNumbering) {
81     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
82     *globalNumbering = cellNumbering;
83   }
84   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
85   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
86     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
87     if (nroots > 0) {if (cellNum[p] < 0) continue;}
88     adjSize = PETSC_DETERMINE;
89     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
90     for (a = 0; a < adjSize; ++a) {
91       const PetscInt point = adj[a];
92       if (point != p && pStart <= point && point < pEnd) {
93         PetscInt *PETSC_RESTRICT pBuf;
94         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
95         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
96         *pBuf = point;
97       }
98     }
99     (*numVertices)++;
100   }
101   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
102   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
103   /* Derive CSR graph from section/segbuffer */
104   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
105   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
106   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
107   for (idx = 0, p = pStart; p < pEnd; p++) {
108     if (nroots > 0) {if (cellNum[p] < 0) continue;}
109     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
110   }
111   vOffsets[*numVertices] = size;
112   if (offsets) *offsets = vOffsets;
113   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
114   {
115     ISLocalToGlobalMapping ltogCells;
116     PetscInt n, size, *cells_arr;
117     /* In parallel, apply a global cell numbering to the graph */
118     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
121     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     /* Convert to positive global cell numbers */
123     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
124     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
126     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
127     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
128   }
129   if (adjacency) *adjacency = graph;
130   /* Clean up */
131   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
132   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
133   ierr = PetscFree(adj);CHKERRQ(ierr);
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "DMPlexCreateNeighborCSR"
139 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
140 {
141   const PetscInt maxFaceCases = 30;
142   PetscInt       numFaceCases = 0;
143   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
144   PetscInt      *off, *adj;
145   PetscInt      *neighborCells = NULL;
146   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
147   PetscErrorCode ierr;
148 
149   PetscFunctionBegin;
150   /* For parallel partitioning, I think you have to communicate supports */
151   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
152   cellDim = dim - cellHeight;
153   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
154   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
155   if (cEnd - cStart == 0) {
156     if (numVertices) *numVertices = 0;
157     if (offsets)   *offsets   = NULL;
158     if (adjacency) *adjacency = NULL;
159     PetscFunctionReturn(0);
160   }
161   numCells  = cEnd - cStart;
162   faceDepth = depth - cellHeight;
163   if (dim == depth) {
164     PetscInt f, fStart, fEnd;
165 
166     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
167     /* Count neighboring cells */
168     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
169     for (f = fStart; f < fEnd; ++f) {
170       const PetscInt *support;
171       PetscInt        supportSize;
172       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
173       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
174       if (supportSize == 2) {
175         PetscInt numChildren;
176 
177         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
178         if (!numChildren) {
179           ++off[support[0]-cStart+1];
180           ++off[support[1]-cStart+1];
181         }
182       }
183     }
184     /* Prefix sum */
185     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
186     if (adjacency) {
187       PetscInt *tmp;
188 
189       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
190       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
191       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
192       /* Get neighboring cells */
193       for (f = fStart; f < fEnd; ++f) {
194         const PetscInt *support;
195         PetscInt        supportSize;
196         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
197         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
198         if (supportSize == 2) {
199           PetscInt numChildren;
200 
201           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
202           if (!numChildren) {
203             adj[tmp[support[0]-cStart]++] = support[1];
204             adj[tmp[support[1]-cStart]++] = support[0];
205           }
206         }
207       }
208 #if defined(PETSC_USE_DEBUG)
209       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
210 #endif
211       ierr = PetscFree(tmp);CHKERRQ(ierr);
212     }
213     if (numVertices) *numVertices = numCells;
214     if (offsets)   *offsets   = off;
215     if (adjacency) *adjacency = adj;
216     PetscFunctionReturn(0);
217   }
218   /* Setup face recognition */
219   if (faceDepth == 1) {
220     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
221 
222     for (c = cStart; c < cEnd; ++c) {
223       PetscInt corners;
224 
225       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
226       if (!cornersSeen[corners]) {
227         PetscInt nFV;
228 
229         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
230         cornersSeen[corners] = 1;
231 
232         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
233 
234         numFaceVertices[numFaceCases++] = nFV;
235       }
236     }
237   }
238   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
239   /* Count neighboring cells */
240   for (cell = cStart; cell < cEnd; ++cell) {
241     PetscInt numNeighbors = PETSC_DETERMINE, n;
242 
243     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
244     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
245     for (n = 0; n < numNeighbors; ++n) {
246       PetscInt        cellPair[2];
247       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
248       PetscInt        meetSize = 0;
249       const PetscInt *meet    = NULL;
250 
251       cellPair[0] = cell; cellPair[1] = neighborCells[n];
252       if (cellPair[0] == cellPair[1]) continue;
253       if (!found) {
254         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
255         if (meetSize) {
256           PetscInt f;
257 
258           for (f = 0; f < numFaceCases; ++f) {
259             if (numFaceVertices[f] == meetSize) {
260               found = PETSC_TRUE;
261               break;
262             }
263           }
264         }
265         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
266       }
267       if (found) ++off[cell-cStart+1];
268     }
269   }
270   /* Prefix sum */
271   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
272 
273   if (adjacency) {
274     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
275     /* Get neighboring cells */
276     for (cell = cStart; cell < cEnd; ++cell) {
277       PetscInt numNeighbors = PETSC_DETERMINE, n;
278       PetscInt cellOffset   = 0;
279 
280       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
281       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
282       for (n = 0; n < numNeighbors; ++n) {
283         PetscInt        cellPair[2];
284         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
285         PetscInt        meetSize = 0;
286         const PetscInt *meet    = NULL;
287 
288         cellPair[0] = cell; cellPair[1] = neighborCells[n];
289         if (cellPair[0] == cellPair[1]) continue;
290         if (!found) {
291           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
292           if (meetSize) {
293             PetscInt f;
294 
295             for (f = 0; f < numFaceCases; ++f) {
296               if (numFaceVertices[f] == meetSize) {
297                 found = PETSC_TRUE;
298                 break;
299               }
300             }
301           }
302           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
303         }
304         if (found) {
305           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
306           ++cellOffset;
307         }
308       }
309     }
310   }
311   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
312   if (numVertices) *numVertices = numCells;
313   if (offsets)   *offsets   = off;
314   if (adjacency) *adjacency = adj;
315   PetscFunctionReturn(0);
316 }
317 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Store the creation routine in the global function list, keyed by sname */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
361 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetType"
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op if the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure all built-in types are registered before searching the list */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down the previous implementation before installing the new one */
  if (part->ops->destroy) {
    ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
    part->ops->destroy = NULL;
  }
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
404 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return the type name stored on the base object; NULL if the type was never set */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
434 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the ASCII viewer on the object's communicator when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
460 
461 #undef __FUNCT__
462 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
463 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
464 {
465   const char    *defaultType;
466   char           name[256];
467   PetscBool      flg;
468   PetscErrorCode ierr;
469 
470   PetscFunctionBegin;
471   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
472   if (!((PetscObject) part)->type_name)
473 #if defined(PETSC_HAVE_CHACO)
474     defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     defaultType = PETSCPARTITIONERPARMETIS;
477 #else
478     defaultType = PETSCPARTITIONERSIMPLE;
479 #endif
480   else
481     defaultType = ((PetscObject) part)->type_name;
482   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
483 
484   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
485   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
486   if (flg) {
487     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
488   } else if (!((PetscObject) part)->type_name) {
489     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
490   }
491   ierr = PetscOptionsEnd();CHKERRQ(ierr);
492   PetscFunctionReturn(0);
493 }
494 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Select the type first so the type-specific setfromoptions below can run */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
525 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup, if the implementation provides one */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "PetscPartitionerDestroy"
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate"
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package (which registers PETSCPARTITIONER_CLASSID) is initialized */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "PetscPartitionerPartition"
615 /*@
616   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
617 
618   Collective on DM
619 
620   Input Parameters:
621 + part    - The PetscPartitioner
622 - dm      - The mesh DM
623 
624   Output Parameters:
625 + partSection     - The PetscSection giving the division of points by partition
626 - partition       - The list of points by partition
627 
628   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
629 
630   Level: developer
631 
632 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
633 @*/
634 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
635 {
636   PetscMPIInt    size;
637   PetscErrorCode ierr;
638 
639   PetscFunctionBegin;
640   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
641   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
642   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
643   PetscValidPointer(partition, 5);
644   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
645   if (size == 1) {
646     PetscInt *points;
647     PetscInt  cStart, cEnd, c;
648 
649     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
650     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
651     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
652     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
653     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
654     for (c = cStart; c < cEnd; ++c) points[c] = c;
655     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
656     PetscFunctionReturn(0);
657   }
658   if (part->height == 0) {
659     PetscInt numVertices;
660     PetscInt *start     = NULL;
661     PetscInt *adjacency = NULL;
662     IS       globalNumbering;
663 
664     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
665     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
666     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
667     ierr = PetscFree(start);CHKERRQ(ierr);
668     ierr = PetscFree(adjacency);CHKERRQ(ierr);
669     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
670       const PetscInt *globalNum;
671       const PetscInt *partIdx;
672       PetscInt *map, cStart, cEnd;
673       PetscInt *adjusted, i, localSize, offset;
674       IS    newPartition;
675 
676       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
677       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
678       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
679       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
680       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
681       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
682       for (i = cStart, offset = 0; i < cEnd; i++) {
683         if (globalNum[i - cStart] >= 0) map[offset++] = i;
684       }
685       for (i = 0; i < localSize; i++) {
686         adjusted[i] = map[partIdx[i]];
687       }
688       ierr = PetscFree(map);CHKERRQ(ierr);
689       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
690       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
691       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
692       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
693       ierr = ISDestroy(partition);CHKERRQ(ierr);
694       *partition = newPartition;
695     }
696   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
697   PetscFunctionReturn(0);
698 
699 }
700 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy_Shell"
/* Release the shell partitioner's stored section and partition, then free its data struct */
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
714 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
/* ASCII viewer for the shell partitioner: prints only a header line */
PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* The format is fetched for symmetry with other viewers but not currently consulted */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
727 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView_Shell"
/* Viewer dispatch for the shell partitioner; only ASCII viewers are supported */
PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
742 
743 #undef __FUNCT__
744 #define __FUNCT__ "PetscPartitionerPartition_Shell"
745 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
746 {
747   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
748   PetscInt                np;
749   PetscErrorCode          ierr;
750 
751   PetscFunctionBegin;
752   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
753   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
754   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
755   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
756   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
757   *partition = p->partition;
758   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
759   PetscFunctionReturn(0);
760 }
761 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the shell implementations into the PetscPartitioner ops table */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
772 
773 /*MC
774   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
775 
776   Level: intermediate
777 
778 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
779 M*/
780 
781 #undef __FUNCT__
782 #define __FUNCT__ "PetscPartitionerCreate_Shell"
783 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
784 {
785   PetscPartitioner_Shell *p;
786   PetscErrorCode          ierr;
787 
788   PetscFunctionBegin;
789   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
790   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
791   part->data = p;
792 
793   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
794   PetscFunctionReturn(0);
795 }
796 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine,
    since the data is stored in a private section and IS (PETSC_COPY_VALUES).

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  if (sizes) {PetscValidPointer(sizes, 3);}
  /* points is required exactly when sizes is given (see the iff in the docs) */
  if (sizes) {PetscValidPointer(points, 4);}
  /* Discard any previously configured partition before building the new one */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* numPoints = sum(sizes); with sizes == NULL this is 0 and the IS is empty */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
842 
843 #undef __FUNCT__
844 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
845 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
846 {
847   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
848   PetscErrorCode          ierr;
849 
850   PetscFunctionBegin;
851   ierr = PetscFree(p);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
857 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
858 {
859   PetscViewerFormat format;
860   PetscErrorCode    ierr;
861 
862   PetscFunctionBegin;
863   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
864   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
865   PetscFunctionReturn(0);
866 }
867 
868 #undef __FUNCT__
869 #define __FUNCT__ "PetscPartitionerView_Simple"
870 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
871 {
872   PetscBool      iascii;
873   PetscErrorCode ierr;
874 
875   PetscFunctionBegin;
876   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
877   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
878   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
879   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
880   PetscFunctionReturn(0);
881 }
882 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/* Partition by cutting the global vertex numbering into nparts contiguous chunks
   of nearly equal size (the first nvGlobal % nparts chunks get one extra vertex).
   The graph connectivity (start, adjacency) is ignored.  The returned IS is the
   identity on the local vertices; partSection records, for each partition this
   rank touches, how many of this rank's vertices fall into it. */
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Identity map: local vertex v keeps local index v */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: partition np gets floor(n/nparts) vertices, plus one if np < n % nparts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-rank vertex counts to get the global index range
       [myFirst, myLast] owned by this rank */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;         /* number of "big" partitions */
      PetscInt pSmall = nvGlobal/nparts;        /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1;      /* size of a big partition */


      /* Find the range of partitions [firstPart, lastPart] that intersect this
         rank's global index range.  The first rem partitions have pBig vertices,
         the remaining ones pSmall, so the index->partition map is piecewise linear. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      for (np = firstPart; np <= lastPart; np++) {
        /* Global index range of partition np, clipped to this rank's range */
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
958 
959 #undef __FUNCT__
960 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
961 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
962 {
963   PetscFunctionBegin;
964   part->ops->view      = PetscPartitionerView_Simple;
965   part->ops->destroy   = PetscPartitionerDestroy_Simple;
966   part->ops->partition = PetscPartitionerPartition_Simple;
967   PetscFunctionReturn(0);
968 }
969 
970 /*MC
971   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
972 
973   Level: intermediate
974 
975 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
976 M*/
977 
978 #undef __FUNCT__
979 #define __FUNCT__ "PetscPartitionerCreate_Simple"
980 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
981 {
982   PetscPartitioner_Simple *p;
983   PetscErrorCode           ierr;
984 
985   PetscFunctionBegin;
986   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
987   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
988   part->data = p;
989 
990   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
991   PetscFunctionReturn(0);
992 }
993 
994 #undef __FUNCT__
995 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
996 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
997 {
998   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
999   PetscErrorCode          ierr;
1000 
1001   PetscFunctionBegin;
1002   ierr = PetscFree(p);CHKERRQ(ierr);
1003   PetscFunctionReturn(0);
1004 }
1005 
1006 #undef __FUNCT__
1007 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
1008 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1009 {
1010   PetscViewerFormat format;
1011   PetscErrorCode    ierr;
1012 
1013   PetscFunctionBegin;
1014   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1015   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 #undef __FUNCT__
1020 #define __FUNCT__ "PetscPartitionerView_Gather"
1021 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1022 {
1023   PetscBool      iascii;
1024   PetscErrorCode ierr;
1025 
1026   PetscFunctionBegin;
1027   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1028   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1029   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1030   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1031   PetscFunctionReturn(0);
1032 }
1033 
1034 #undef __FUNCT__
1035 #define __FUNCT__ "PetscPartitionerPartition_Gather"
1036 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1037 {
1038   PetscInt       np;
1039   PetscErrorCode ierr;
1040 
1041   PetscFunctionBegin;
1042   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1043   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1044   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1045   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1046   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 #undef __FUNCT__
1051 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1052 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1053 {
1054   PetscFunctionBegin;
1055   part->ops->view      = PetscPartitionerView_Gather;
1056   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1057   part->ops->partition = PetscPartitionerPartition_Gather;
1058   PetscFunctionReturn(0);
1059 }
1060 
1061 /*MC
1062   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1063 
1064   Level: intermediate
1065 
1066 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1067 M*/
1068 
1069 #undef __FUNCT__
1070 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1071 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1072 {
1073   PetscPartitioner_Gather *p;
1074   PetscErrorCode           ierr;
1075 
1076   PetscFunctionBegin;
1077   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1078   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1079   part->data = p;
1080 
1081   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1082   PetscFunctionReturn(0);
1083 }
1084 
1085 
1086 #undef __FUNCT__
1087 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1088 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1089 {
1090   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1091   PetscErrorCode          ierr;
1092 
1093   PetscFunctionBegin;
1094   ierr = PetscFree(p);CHKERRQ(ierr);
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 #undef __FUNCT__
1099 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1100 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1101 {
1102   PetscViewerFormat format;
1103   PetscErrorCode    ierr;
1104 
1105   PetscFunctionBegin;
1106   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1107   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1108   PetscFunctionReturn(0);
1109 }
1110 
1111 #undef __FUNCT__
1112 #define __FUNCT__ "PetscPartitionerView_Chaco"
1113 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1114 {
1115   PetscBool      iascii;
1116   PetscErrorCode ierr;
1117 
1118   PetscFunctionBegin;
1119   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1120   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1121   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1122   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 #if defined(PETSC_HAVE_CHACO)
1127 #if defined(PETSC_HAVE_UNISTD_H)
1128 #include <unistd.h>
1129 #endif
1130 /* Chaco does not have an include file */
1131 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1132                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1133                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1134                        int mesh_dims[3], double *goal, int global_method, int local_method,
1135                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1136 
1137 extern int FREE_GRAPH;
1138 #endif
1139 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the local graph with the Chaco library.

   The CSR graph (start, adjacency) is shifted to Chaco's 1-based numbering in
   place, handed to Chaco's interface(), and shifted back before returning.
   The result is expressed as partSection (vertex count per partition) plus an
   IS listing the local vertices grouped by partition.  Chaco prints to stdout,
   so stdout is temporarily redirected into a pipe and replayed only on error. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Ranks with no local vertices still set up an (empty) result and return early */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* architecture == 1: a 1-D processor mesh with nparts slots */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): without PETSC_HAVE_UNISTD_H the Chaco return code left in
     ierr is never checked -- confirm this is intentional */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's captured stdout, restore the real stdout, and replay the
       captured text only if Chaco reported failure */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* points lists the local vertices grouped by partition; ownership passes to the IS */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift so the caller's adjacency array is unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1245 
1246 #undef __FUNCT__
1247 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1248 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1249 {
1250   PetscFunctionBegin;
1251   part->ops->view      = PetscPartitionerView_Chaco;
1252   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1253   part->ops->partition = PetscPartitionerPartition_Chaco;
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 /*MC
1258   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1259 
1260   Level: intermediate
1261 
1262 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1263 M*/
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1267 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1268 {
1269   PetscPartitioner_Chaco *p;
1270   PetscErrorCode          ierr;
1271 
1272   PetscFunctionBegin;
1273   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1274   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1275   part->data = p;
1276 
1277   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1278   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1279   PetscFunctionReturn(0);
1280 }
1281 
1282 #undef __FUNCT__
1283 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1284 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1285 {
1286   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1287   PetscErrorCode             ierr;
1288 
1289   PetscFunctionBegin;
1290   ierr = PetscFree(p);CHKERRQ(ierr);
1291   PetscFunctionReturn(0);
1292 }
1293 
1294 #undef __FUNCT__
1295 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1296 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1297 {
1298   PetscViewerFormat format;
1299   PetscErrorCode    ierr;
1300 
1301   PetscFunctionBegin;
1302   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1303   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1304   PetscFunctionReturn(0);
1305 }
1306 
1307 #undef __FUNCT__
1308 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1309 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1310 {
1311   PetscBool      iascii;
1312   PetscErrorCode ierr;
1313 
1314   PetscFunctionBegin;
1315   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1316   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1317   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1318   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1319   PetscFunctionReturn(0);
1320 }
1321 
1322 #if defined(PETSC_HAVE_PARMETIS)
1323 #include <parmetis.h>
1324 #endif
1325 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_ParMetis"
/* Partition the distributed CSR graph with (Par)METIS.

   If all vertices live on rank 0 the serial METIS_PartGraphKway is used there;
   otherwise ParMETIS_V3_PartKway runs on the whole communicator.  Vertices are
   weighted by the number of dofs on the corresponding cell (when a default
   section exists), else uniformly.  The result is returned as partSection
   (vertex count per partition) plus an IS of local vertices grouped by
   partition. */
PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  MPI_Comm       comm;
  PetscSection   section;
  PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
  PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt       = NULL;        /* Vertex weights */
  PetscInt      *adjwgt     = NULL;        /* Edge weights */
  PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
  PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon       = 1;           /* The number of weights per vertex */
  PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
  PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
  PetscInt       options[5];               /* Options */
  /* Outputs */
  PetscInt       edgeCut;                  /* The number of edges cut by the partition */
  PetscInt      *assignment, *points;
  PetscMPIInt    rank, p, v, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  options[0] = 0; /* Use all defaults */
  /* Calculate vertex distribution */
  /* NOTE(review): vtxdist has nparts+1 entries but MPI_Allgather writes one
     entry per rank of comm -- this assumes nparts equals the communicator
     size; confirm, since nparts != size would overflow the allocation */
  ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  /* Prefix-sum the per-rank counts into global offsets (vtxdist[1] is already
     rank 0's count, i.e. rank 1's offset) */
  for (p = 2; p <= nparts; ++p) {
    vtxdist[p] += vtxdist[p-1];
  }
  /* Calculate weights */
  /* Ask for equal-sized partitions with 5% load-imbalance tolerance */
  for (p = 0; p < nparts; ++p) {
    tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = 1.05;
  /* Weight cells by dofs on cell by default */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt cStart, cEnd, dof;

    ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
    for (v = cStart; v < cEnd; ++v) {
      ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
      /* Every vertex needs a positive weight even if the cell has no dofs */
      vwgt[v-cStart] = PetscMax(dof, 1);
    }
  } else {
    for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
  }

  if (nparts == 1) {
    /* Trivial case: everything goes to partition 0 */
    ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
  } else {
    /* vtxdist[1] == vtxdist[nparts] means all vertices are on rank 0, so the
       serial METIS entry point can be used there; otherwise run ParMETIS on
       the whole communicator.  In both cases ierr temporarily holds a METIS
       status code, not a PetscErrorCode. */
    if (vtxdist[1] == vtxdist[nparts]) {
      if (!rank) {
        PetscStackPush("METIS_PartGraphKway");
        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
  }
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* points lists the local vertices grouped by partition; ownership passes to the IS */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
1416 
1417 #undef __FUNCT__
1418 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1419 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1420 {
1421   PetscFunctionBegin;
1422   part->ops->view      = PetscPartitionerView_ParMetis;
1423   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1424   part->ops->partition = PetscPartitionerPartition_ParMetis;
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 /*MC
1429   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1430 
1431   Level: intermediate
1432 
1433 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1434 M*/
1435 
1436 #undef __FUNCT__
1437 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1438 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1439 {
1440   PetscPartitioner_ParMetis *p;
1441   PetscErrorCode          ierr;
1442 
1443   PetscFunctionBegin;
1444   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1445   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1446   part->data = p;
1447 
1448   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1449   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1450   PetscFunctionReturn(0);
1451 }
1452 
1453 #undef __FUNCT__
1454 #define __FUNCT__ "DMPlexGetPartitioner"
1455 /*@
1456   DMPlexGetPartitioner - Get the mesh partitioner
1457 
1458   Not collective
1459 
1460   Input Parameter:
1461 . dm - The DM
1462 
1463   Output Parameter:
1464 . part - The PetscPartitioner
1465 
1466   Level: developer
1467 
1468   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1469 
1470 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1471 @*/
1472 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1473 {
1474   DM_Plex *mesh = (DM_Plex *) dm->data;
1475 
1476   PetscFunctionBegin;
1477   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1478   PetscValidPointer(part, 2);
1479   *part = mesh->partitioner;
1480   PetscFunctionReturn(0);
1481 }
1482 
1483 #undef __FUNCT__
1484 #define __FUNCT__ "DMPlexSetPartitioner"
1485 /*@
1486   DMPlexSetPartitioner - Set the mesh partitioner
1487 
1488   logically collective on dm and part
1489 
1490   Input Parameters:
1491 + dm - The DM
1492 - part - The partitioner
1493 
1494   Level: developer
1495 
1496   Note: Any existing PetscPartitioner will be destroyed.
1497 
1498 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1499 @*/
1500 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1501 {
1502   DM_Plex       *mesh = (DM_Plex *) dm->data;
1503   PetscErrorCode ierr;
1504 
1505   PetscFunctionBegin;
1506   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1507   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1508   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1509   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1510   mesh->partitioner = part;
1511   PetscFunctionReturn(0);
1512 }
1513 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/* Propagate a rank assignment through the mesh refinement tree.

   For a point already labeled with rank, also label tree relatives so the
   partition is consistent across non-conforming interfaces:
   - up:   label the closure of the point's tree parent, recursing upwards
   - down: label the point's tree children, recursing downwards
   The up/down flags prevent the recursion from bouncing back the way it came. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* DMPlexGetTreeParent returns the point itself when there is no parent */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];  /* closure stores (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue upwards only; PETSC_FALSE stops us from descending again */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue downwards only */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1556 
1557 #undef __FUNCT__
1558 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1559 /*@
1560   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1561 
1562   Input Parameters:
1563 + dm     - The DM
1564 - label  - DMLabel assinging ranks to remote roots
1565 
1566   Level: developer
1567 
1568 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1569 @*/
1570 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1571 {
1572   IS              rankIS,   pointIS;
1573   const PetscInt *ranks,   *points;
1574   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1575   PetscInt       *closure = NULL;
1576   PetscErrorCode  ierr;
1577 
1578   PetscFunctionBegin;
1579   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1580   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1581   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1582   for (r = 0; r < numRanks; ++r) {
1583     const PetscInt rank = ranks[r];
1584 
1585     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1586     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1587     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1588     for (p = 0; p < numPoints; ++p) {
1589       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1590       for (c = 0; c < closureSize*2; c += 2) {
1591         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1592         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1593       }
1594     }
1595     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1596     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1597   }
1598   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1599   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1600   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1601   PetscFunctionReturn(0);
1602 }
1603 
1604 #undef __FUNCT__
1605 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1606 /*@
1607   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1608 
1609   Input Parameters:
1610 + dm     - The DM
1611 - label  - DMLabel assinging ranks to remote roots
1612 
1613   Level: developer
1614 
1615 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1616 @*/
1617 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1618 {
1619   IS              rankIS,   pointIS;
1620   const PetscInt *ranks,   *points;
1621   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1622   PetscInt       *adj = NULL;
1623   PetscErrorCode  ierr;
1624 
1625   PetscFunctionBegin;
1626   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1627   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1628   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1629   for (r = 0; r < numRanks; ++r) {
1630     const PetscInt rank = ranks[r];
1631 
1632     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1633     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1634     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1635     for (p = 0; p < numPoints; ++p) {
1636       adjSize = PETSC_DETERMINE;
1637       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1638       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1639     }
1640     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1641     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1642   }
1643   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1644   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1645   ierr = PetscFree(adj);CHKERRQ(ierr);
1646   PetscFunctionReturn(0);
1647 }
1648 
1649 #undef __FUNCT__
1650 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1651 /*@
1652   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1653 
1654   Input Parameters:
1655 + dm     - The DM
1656 - label  - DMLabel assinging ranks to remote roots
1657 
1658   Level: developer
1659 
1660   Note: This is required when generating multi-level overlaps to capture
1661   overlap points from non-neighbouring partitions.
1662 
1663 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1664 @*/
1665 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1666 {
1667   MPI_Comm        comm;
1668   PetscMPIInt     rank;
1669   PetscSF         sfPoint;
1670   DMLabel         lblRoots, lblLeaves;
1671   IS              rankIS, pointIS;
1672   const PetscInt *ranks;
1673   PetscInt        numRanks, r;
1674   PetscErrorCode  ierr;
1675 
1676   PetscFunctionBegin;
1677   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1678   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1679   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1680   /* Pull point contributions from remote leaves into local roots */
1681   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1682   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1683   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1684   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1685   for (r = 0; r < numRanks; ++r) {
1686     const PetscInt remoteRank = ranks[r];
1687     if (remoteRank == rank) continue;
1688     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1689     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1690     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1691   }
1692   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1693   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1694   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1695   /* Push point contributions from roots into remote leaves */
1696   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1697   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1698   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1699   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1700   for (r = 0; r < numRanks; ++r) {
1701     const PetscInt remoteRank = ranks[r];
1702     if (remoteRank == rank) continue;
1703     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1704     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1705     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1706   }
1707   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1708   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1709   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1710   PetscFunctionReturn(0);
1711 }
1712 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over target ranks: one dof per point destined for that rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its remote owner; otherwise it is locally owned */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: ship the (point, rank) pairs to their target ranks via processSF */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Insert the received (point, rank) pairs into leafLabel */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1802 
1803 #undef __FUNCT__
1804 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1805 /*@
1806   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1807 
1808   Input Parameters:
1809 + dm    - The DM
1810 . label - DMLabel assinging ranks to remote roots
1811 
1812   Output Parameter:
1813 - sf    - The star forest communication context encapsulating the defined mapping
1814 
1815   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1816 
1817   Level: developer
1818 
1819 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1820 @*/
1821 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1822 {
1823   PetscMPIInt     rank, numProcs;
1824   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1825   PetscSFNode    *remotePoints;
1826   IS              remoteRootIS;
1827   const PetscInt *remoteRoots;
1828   PetscErrorCode ierr;
1829 
1830   PetscFunctionBegin;
1831   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1832   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1833 
1834   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1835     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1836     numRemote += numPoints;
1837   }
1838   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1839   /* Put owned points first */
1840   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1841   if (numPoints > 0) {
1842     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1843     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1844     for (p = 0; p < numPoints; p++) {
1845       remotePoints[idx].index = remoteRoots[p];
1846       remotePoints[idx].rank = rank;
1847       idx++;
1848     }
1849     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1850     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1851   }
1852   /* Now add remote points */
1853   for (n = 0; n < numProcs; ++n) {
1854     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1855     if (numPoints <= 0 || n == rank) continue;
1856     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1857     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1858     for (p = 0; p < numPoints; p++) {
1859       remotePoints[idx].index = remoteRoots[p];
1860       remotePoints[idx].rank = n;
1861       idx++;
1862     }
1863     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1864     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1865   }
1866   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1867   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1868   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1869   PetscFunctionReturn(0);
1870 }
1871