xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 2f6eced2a19e978d64f27de66fbc6c26cc5c7934)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; assigned during package initialization */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner implementations, populated by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* Tracks whether the Chaco citation below has already been emitted */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner.  Note: every field line must end in
   "\n" or adjacent fields run together in the emitted citation. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
/* Tracks whether the ParMetis citation below has already been emitted */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry for the ParMetis partitioner (presumably registered via the
   PETSc citation mechanism by the ParMetis implementation — not visible here) */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
33 + dm      - The mesh DM dm
34 - height  - Height of the strata from which to construct the graph
35 
  Output Parameters:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
43   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 /*@C
315   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
316 
317   Not Collective
318 
319   Input Parameters:
320 + name        - The name of a new user-defined creation routine
321 - create_func - The creation routine itself
322 
323   Notes:
324   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
325 
326   Sample usage:
327 .vb
328     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
329 .ve
330 
331   Then, your PetscPartitioner type can be chosen with the procedural interface via
332 .vb
333     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
334     PetscPartitionerSetType(PetscPartitioner, "my_part");
335 .ve
336    or at runtime via the option
337 .vb
338     -petscpartitioner_type my_part
339 .ve
340 
341   Level: advanced
342 
343 .keywords: PetscPartitioner, register
344 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
345 
346 @*/
347 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
348 {
349   PetscErrorCode ierr;
350 
351   PetscFunctionBegin;
352   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
353   PetscFunctionReturn(0);
354 }
355 
356 /*@C
357   PetscPartitionerSetType - Builds a particular PetscPartitioner
358 
359   Collective on PetscPartitioner
360 
361   Input Parameters:
362 + part - The PetscPartitioner object
363 - name - The kind of partitioner
364 
365   Options Database Key:
366 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
367 
368   Level: intermediate
369 
370 .keywords: PetscPartitioner, set, type
371 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
372 @*/
373 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
374 {
375   PetscErrorCode (*r)(PetscPartitioner);
376   PetscBool      match;
377   PetscErrorCode ierr;
378 
379   PetscFunctionBegin;
380   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
381   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
382   if (match) PetscFunctionReturn(0);
383 
384   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
385   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
386   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
387 
388   if (part->ops->destroy) {
389     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
390     part->ops->destroy = NULL;
391   }
392   ierr = (*r)(part);CHKERRQ(ierr);
393   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
394   PetscFunctionReturn(0);
395 }
396 
397 /*@C
398   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
399 
400   Not Collective
401 
402   Input Parameter:
403 . part - The PetscPartitioner
404 
405   Output Parameter:
406 . name - The PetscPartitioner type name
407 
408   Level: intermediate
409 
410 .keywords: PetscPartitioner, get, type, name
411 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
412 @*/
413 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
414 {
415   PetscErrorCode ierr;
416 
417   PetscFunctionBegin;
418   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
419   PetscValidPointer(name, 2);
420   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
421   *name = ((PetscObject) part)->type_name;
422   PetscFunctionReturn(0);
423 }
424 
425 /*@C
426   PetscPartitionerView - Views a PetscPartitioner
427 
428   Collective on PetscPartitioner
429 
  Input Parameters:
431 + part - the PetscPartitioner object to view
432 - v    - the viewer
433 
434   Level: developer
435 
436 .seealso: PetscPartitionerDestroy()
437 @*/
438 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
439 {
440   PetscErrorCode ierr;
441 
442   PetscFunctionBegin;
443   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
444   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
445   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
446   PetscFunctionReturn(0);
447 }
448 
449 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
450 {
451   const char    *defaultType;
452   char           name[256];
453   PetscBool      flg;
454   PetscErrorCode ierr;
455 
456   PetscFunctionBegin;
457   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
458   if (!((PetscObject) part)->type_name)
459 #if defined(PETSC_HAVE_CHACO)
460     defaultType = PETSCPARTITIONERCHACO;
461 #elif defined(PETSC_HAVE_PARMETIS)
462     defaultType = PETSCPARTITIONERPARMETIS;
463 #else
464     defaultType = PETSCPARTITIONERSIMPLE;
465 #endif
466   else
467     defaultType = ((PetscObject) part)->type_name;
468   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
469 
470   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
471   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
472   if (flg) {
473     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
474   } else if (!((PetscObject) part)->type_name) {
475     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
476   }
477   ierr = PetscOptionsEnd();CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
481 /*@
482   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
483 
484   Collective on PetscPartitioner
485 
486   Input Parameter:
487 . part - the PetscPartitioner object to set options for
488 
489   Level: developer
490 
491 .seealso: PetscPartitionerView()
492 @*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions hook below exists.
     NOTE: PetscOptionsObject used below is provided by the PetscObjectOptionsBegin() macro. */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -petscpartitioner_view after all options are applied */
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
509 
510 /*@C
511   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
512 
513   Collective on PetscPartitioner
514 
515   Input Parameter:
516 . part - the PetscPartitioner object to setup
517 
518   Level: developer
519 
520 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
521 @*/
522 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
523 {
524   PetscErrorCode ierr;
525 
526   PetscFunctionBegin;
527   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
528   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
529   PetscFunctionReturn(0);
530 }
531 
532 /*@
533   PetscPartitionerDestroy - Destroys a PetscPartitioner object
534 
535   Collective on PetscPartitioner
536 
537   Input Parameter:
538 . part - the PetscPartitioner object to destroy
539 
540   Level: developer
541 
542 .seealso: PetscPartitionerView()
543 @*/
544 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
545 {
546   PetscErrorCode ierr;
547 
548   PetscFunctionBegin;
549   if (!*part) PetscFunctionReturn(0);
550   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
551 
552   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
553   ((PetscObject) (*part))->refct = 0;
554 
555   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
556   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
557   PetscFunctionReturn(0);
558 }
559 
560 /*@
561   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
562 
563   Collective on MPI_Comm
564 
565   Input Parameter:
566 . comm - The communicator for the PetscPartitioner object
567 
568   Output Parameter:
569 . part - The PetscPartitioner object
570 
571   Level: beginner
572 
573 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
574 @*/
575 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
576 {
577   PetscPartitioner p;
578   PetscErrorCode   ierr;
579 
580   PetscFunctionBegin;
581   PetscValidPointer(part, 2);
582   *part = NULL;
583   ierr = DMInitializePackage();CHKERRQ(ierr);
584 
585   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
586 
587   *part = p;
588   PetscFunctionReturn(0);
589 }
590 
591 /*@
592   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
593 
594   Collective on DM
595 
596   Input Parameters:
597 + part    - The PetscPartitioner
598 - dm      - The mesh DM
599 
600   Output Parameters:
601 + partSection     - The PetscSection giving the division of points by partition
602 - partition       - The list of points by partition
603 
604   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
605 
606   Level: developer
607 
.seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
609 @*/
610 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
611 {
612   PetscMPIInt    size;
613   PetscErrorCode ierr;
614 
615   PetscFunctionBegin;
616   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
617   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
618   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
619   PetscValidPointer(partition, 5);
620   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
621   if (size == 1) {
622     PetscInt *points;
623     PetscInt  cStart, cEnd, c;
624 
625     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
626     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
627     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
628     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
629     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
630     for (c = cStart; c < cEnd; ++c) points[c] = c;
631     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
632     PetscFunctionReturn(0);
633   }
634   if (part->height == 0) {
635     PetscInt numVertices;
636     PetscInt *start     = NULL;
637     PetscInt *adjacency = NULL;
638     IS       globalNumbering;
639 
640     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
641     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
642     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
643     ierr = PetscFree(start);CHKERRQ(ierr);
644     ierr = PetscFree(adjacency);CHKERRQ(ierr);
645     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
646       const PetscInt *globalNum;
647       const PetscInt *partIdx;
648       PetscInt *map, cStart, cEnd;
649       PetscInt *adjusted, i, localSize, offset;
650       IS    newPartition;
651 
652       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
653       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
654       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
655       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
656       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
657       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
658       for (i = cStart, offset = 0; i < cEnd; i++) {
659         if (globalNum[i - cStart] >= 0) map[offset++] = i;
660       }
661       for (i = 0; i < localSize; i++) {
662         adjusted[i] = map[partIdx[i]];
663       }
664       ierr = PetscFree(map);CHKERRQ(ierr);
665       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
666       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
667       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
668       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
669       ierr = ISDestroy(partition);CHKERRQ(ierr);
670       *partition = newPartition;
671     }
672   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
673   PetscFunctionReturn(0);
674 
675 }
676 
677 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
678 {
679   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
680   PetscErrorCode          ierr;
681 
682   PetscFunctionBegin;
683   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
684   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
685   ierr = PetscFree(p);CHKERRQ(ierr);
686   PetscFunctionReturn(0);
687 }
688 
689 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
690 {
691   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
692   PetscViewerFormat       format;
693   PetscErrorCode          ierr;
694 
695   PetscFunctionBegin;
696   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
697   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
698   if (p->random) {
699     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
700     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
701     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
702   }
703   PetscFunctionReturn(0);
704 }
705 
706 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
707 {
708   PetscBool      iascii;
709   PetscErrorCode ierr;
710 
711   PetscFunctionBegin;
712   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
713   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
714   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
715   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
716   PetscFunctionReturn(0);
717 }
718 
719 PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
720 {
721   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
722   PetscErrorCode          ierr;
723 
724   PetscFunctionBegin;
725   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
726   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
727   ierr = PetscOptionsTail();CHKERRQ(ierr);
728   PetscFunctionReturn(0);
729 }
730 
731 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
732 {
733   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
734   PetscInt                np;
735   PetscErrorCode          ierr;
736 
737   PetscFunctionBegin;
738   if (p->random) {
739     PetscRandom r;
740     PetscInt   *sizes, *points, v;
741 
742     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
743     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (nparts+1));CHKERRQ(ierr);
744     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
745     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
746     for (v = 0; v < numVertices; ++v) {
747       PetscReal val;
748       PetscInt  part;
749 
750       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
751       part = PetscFloorReal(val);
752       points[v] = part;
753       ++sizes[part];
754     }
755     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
756     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
757     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
758   }
759   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
760   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
761   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
762   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
763   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
764   *partition = p->partition;
765   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
766   PetscFunctionReturn(0);
767 }
768 
769 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
770 {
771   PetscFunctionBegin;
772   part->ops->view           = PetscPartitionerView_Shell;
773   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
774   part->ops->destroy        = PetscPartitionerDestroy_Shell;
775   part->ops->partition      = PetscPartitionerPartition_Shell;
776   PetscFunctionReturn(0);
777 }
778 
779 /*MC
780   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
781 
782   Level: intermediate
783 
784 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
785 M*/
786 
787 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
788 {
789   PetscPartitioner_Shell *p;
790   PetscErrorCode          ierr;
791 
792   PetscFunctionBegin;
793   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
794   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
795   part->data = p;
796 
797   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
798   p->random = PETSC_FALSE;
799   PetscFunctionReturn(0);
800 }
801 
802 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
804 
805   Collective on Part
806 
807   Input Parameters:
808 + part     - The PetscPartitioner
809 . numProcs - The number of partitions
810 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
811 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
812 
813   Level: developer
814 
815   Notes:
816 
817     It is safe to free the sizes and points arrays after use in this routine.
818 
819 .seealso DMPlexDistribute(), PetscPartitionerCreate()
820 @*/
821 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
822 {
823   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
824   PetscInt                proc, numPoints;
825   PetscErrorCode          ierr;
826 
827   PetscFunctionBegin;
828   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
829   if (sizes) {PetscValidPointer(sizes, 3);}
830   if (sizes) {PetscValidPointer(points, 4);}
831   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
832   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
833   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
834   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
835   if (sizes) {
836     for (proc = 0; proc < numProcs; ++proc) {
837       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
838     }
839   }
840   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
841   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
842   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
846 /*@
847   PetscPartitionerShellSetRandom - Set the flag to use a random partition
848 
849   Collective on Part
850 
851   Input Parameters:
852 + part   - The PetscPartitioner
853 - random - The flag to use a random partition
854 
855   Level: intermediate
856 
857 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
858 @*/
859 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
860 {
861   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
862 
863   PetscFunctionBegin;
864   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
865   p->random = random;
866   PetscFunctionReturn(0);
867 }
868 
869 /*@
870   PetscPartitionerShellGetRandom - get the flag to use a random partition
871 
872   Collective on Part
873 
874   Input Parameter:
875 . part   - The PetscPartitioner
876 
  Output Parameter:
878 . random - The flag to use a random partition
879 
880   Level: intermediate
881 
882 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
883 @*/
884 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
885 {
886   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
887 
888   PetscFunctionBegin;
889   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
890   PetscValidPointer(random, 2);
891   *random = p->random;
892   PetscFunctionReturn(0);
893 }
894 
895 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
896 {
897   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
898   PetscErrorCode          ierr;
899 
900   PetscFunctionBegin;
901   ierr = PetscFree(p);CHKERRQ(ierr);
902   PetscFunctionReturn(0);
903 }
904 
905 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
906 {
907   PetscViewerFormat format;
908   PetscErrorCode    ierr;
909 
910   PetscFunctionBegin;
911   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
912   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
913   PetscFunctionReturn(0);
914 }
915 
916 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
917 {
918   PetscBool      iascii;
919   PetscErrorCode ierr;
920 
921   PetscFunctionBegin;
922   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
923   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
924   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
925   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
926   PetscFunctionReturn(0);
927 }
928 
/* Partition by splitting the global vertex numbering into nparts contiguous,
   near-equal chunks (sizes differ by at most one).  The local IS is always the
   identity permutation; only the section's dof counts encode the split. */
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local vertices keep their order: partition p receives a contiguous run. */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first (numVertices % nparts) partitions get one extra vertex. */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Build the exclusive prefix sum of per-rank vertex counts so each rank
       knows its global index range [myFirst, myLast]. */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Global target layout: 'rem' large partitions of size pBig followed by
         small partitions of size pSmall.  Find which target partitions this
         rank's index range touches. */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        /* An index past the large-partition region is mapped into the small
           partitions that follow the first rem*pBig global indices. */
        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Evenly divisible: every partition has exactly nvGlobal/nparts vertices. */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each touched partition, count the overlap of the partition's
         global range with this rank's [myFirst, myLast] range. */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1002 
1003 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1004 {
1005   PetscFunctionBegin;
1006   part->ops->view      = PetscPartitionerView_Simple;
1007   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1008   part->ops->partition = PetscPartitionerPartition_Simple;
1009   PetscFunctionReturn(0);
1010 }
1011 
1012 /*MC
1013   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1014 
1015   Level: intermediate
1016 
1017 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1018 M*/
1019 
1020 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1021 {
1022   PetscPartitioner_Simple *p;
1023   PetscErrorCode           ierr;
1024 
1025   PetscFunctionBegin;
1026   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1027   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1028   part->data = p;
1029 
1030   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1031   PetscFunctionReturn(0);
1032 }
1033 
1034 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1035 {
1036   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1037   PetscErrorCode          ierr;
1038 
1039   PetscFunctionBegin;
1040   ierr = PetscFree(p);CHKERRQ(ierr);
1041   PetscFunctionReturn(0);
1042 }
1043 
1044 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1045 {
1046   PetscViewerFormat format;
1047   PetscErrorCode    ierr;
1048 
1049   PetscFunctionBegin;
1050   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1051   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1052   PetscFunctionReturn(0);
1053 }
1054 
1055 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1056 {
1057   PetscBool      iascii;
1058   PetscErrorCode ierr;
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1062   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1063   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1064   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1069 {
1070   PetscInt       np;
1071   PetscErrorCode ierr;
1072 
1073   PetscFunctionBegin;
1074   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1075   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1076   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1077   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1078   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1083 {
1084   PetscFunctionBegin;
1085   part->ops->view      = PetscPartitionerView_Gather;
1086   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1087   part->ops->partition = PetscPartitionerPartition_Gather;
1088   PetscFunctionReturn(0);
1089 }
1090 
1091 /*MC
1092   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1093 
1094   Level: intermediate
1095 
1096 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1097 M*/
1098 
1099 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1100 {
1101   PetscPartitioner_Gather *p;
1102   PetscErrorCode           ierr;
1103 
1104   PetscFunctionBegin;
1105   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1106   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1107   part->data = p;
1108 
1109   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 
1114 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1115 {
1116   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1117   PetscErrorCode          ierr;
1118 
1119   PetscFunctionBegin;
1120   ierr = PetscFree(p);CHKERRQ(ierr);
1121   PetscFunctionReturn(0);
1122 }
1123 
1124 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1125 {
1126   PetscViewerFormat format;
1127   PetscErrorCode    ierr;
1128 
1129   PetscFunctionBegin;
1130   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1131   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1132   PetscFunctionReturn(0);
1133 }
1134 
1135 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1136 {
1137   PetscBool      iascii;
1138   PetscErrorCode ierr;
1139 
1140   PetscFunctionBegin;
1141   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1142   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1143   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1144   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1145   PetscFunctionReturn(0);
1146 }
1147 
1148 #if defined(PETSC_HAVE_CHACO)
1149 #if defined(PETSC_HAVE_UNISTD_H)
1150 #include <unistd.h>
1151 #endif
1152 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1153 #include <chaco.h>
1154 #else
1155 /* Older versions of Chaco do not have an include file */
1156 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1157                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1158                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1159                        int mesh_dims[3], double *goal, int global_method, int local_method,
1160                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1161 #endif
1162 extern int FREE_GRAPH;
1163 #endif
1164 
/* Partition the serial CSR graph with the Chaco library, converting the result
   to a PetscSection (sizes) plus IS (points grouped by partition).  Chaco uses
   1-based vertex numbering and prints to stdout, so the adjacency is shifted in
   place and stdout is temporarily redirected into a pipe to capture messages. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: produce an empty section and IS without calling Chaco. */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex indices; shift in place (undone at the end). */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D processor mesh with nparts processors. */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* Save the real stdout, then point fd 1 at the pipe's write end. */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's output from the pipe, then restore the original stdout
       before reporting any Chaco failure along with the captured log. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the vertex numbers by partition: partition 0's vertices first, etc. */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift so the caller's adjacency array is unchanged. */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1272 
1273 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1274 {
1275   PetscFunctionBegin;
1276   part->ops->view      = PetscPartitionerView_Chaco;
1277   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1278   part->ops->partition = PetscPartitionerPartition_Chaco;
1279   PetscFunctionReturn(0);
1280 }
1281 
1282 /*MC
1283   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1284 
1285   Level: intermediate
1286 
1287 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1288 M*/
1289 
1290 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1291 {
1292   PetscPartitioner_Chaco *p;
1293   PetscErrorCode          ierr;
1294 
1295   PetscFunctionBegin;
1296   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1297   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1298   part->data = p;
1299 
1300   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1301   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1302   PetscFunctionReturn(0);
1303 }
1304 
1305 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1306 {
1307   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1308   PetscErrorCode             ierr;
1309 
1310   PetscFunctionBegin;
1311   ierr = PetscFree(p);CHKERRQ(ierr);
1312   PetscFunctionReturn(0);
1313 }
1314 
1315 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1316 {
1317   PetscViewerFormat format;
1318   PetscErrorCode    ierr;
1319 
1320   PetscFunctionBegin;
1321   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1322   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1323   PetscFunctionReturn(0);
1324 }
1325 
1326 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1327 {
1328   PetscBool      iascii;
1329   PetscErrorCode ierr;
1330 
1331   PetscFunctionBegin;
1332   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1333   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1334   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1335   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1336   PetscFunctionReturn(0);
1337 }
1338 
1339 #if defined(PETSC_HAVE_PARMETIS)
1340 #include <parmetis.h>
1341 #endif
1342 
/* Partition the distributed CSR graph with (Par)METIS and convert the result
   to a PetscSection (sizes) plus IS (points grouped by partition).
   NOTE(review): vtxdist is allocated with nparts+1 entries but MPI_Allgather
   fills one slot per rank of comm, so this code implicitly assumes
   nparts == communicator size — verify against callers. */
PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  MPI_Comm       comm;
  PetscSection   section;
  PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
  PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt       = NULL;        /* Vertex weights */
  PetscInt      *adjwgt     = NULL;        /* Edge weights */
  PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
  PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon       = 1;           /* The number of weights per vertex */
  PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
  PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
  PetscInt       options[5];               /* Options */
  /* Outputs */
  PetscInt       edgeCut;                  /* The number of edges cut by the partition */
  PetscInt      *assignment, *points;
  PetscMPIInt    rank, p, v, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  options[0] = 0; /* Use all defaults */
  /* Calculate vertex distribution */
  ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
  /* vtxdist becomes the exclusive prefix sum of per-rank vertex counts. */
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
  for (p = 2; p <= nparts; ++p) {
    vtxdist[p] += vtxdist[p-1];
  }
  /* Calculate weights */
  /* Target equal-sized partitions with 5% load-imbalance tolerance. */
  for (p = 0; p < nparts; ++p) {
    tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = 1.05;
  /* Weight cells by dofs on cell by default */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt cStart, cEnd, dof;

    ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
    for (v = cStart; v < cEnd; ++v) {
      ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
      /* A cell with no dofs still gets weight 1 so it is not ignored. */
      vwgt[v-cStart] = PetscMax(dof, 1);
    }
  } else {
    for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
  }

  if (nparts == 1) {
    /* Trivial case: everything goes to partition 0. */
    ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
  } else {
    /* vtxdist[1] == vtxdist[nparts] means only rank 0 owns vertices, so the
       serial METIS interface is used there instead of ParMETIS. */
    if (vtxdist[1] == vtxdist[nparts]) {
      if (!rank) {
        PetscStackPush("METIS_PartGraphKway");
        ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
        PetscStackPop;
        if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    } else {
      PetscStackPush("ParMETIS_V3_PartKway");
      ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
      PetscStackPop;
      if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
    }
  }
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group the vertex numbers by partition: partition 0's vertices first, etc. */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
1431 
1432 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1433 {
1434   PetscFunctionBegin;
1435   part->ops->view      = PetscPartitionerView_ParMetis;
1436   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1437   part->ops->partition = PetscPartitionerPartition_ParMetis;
1438   PetscFunctionReturn(0);
1439 }
1440 
1441 /*MC
1442   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1443 
1444   Level: intermediate
1445 
1446 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1447 M*/
1448 
1449 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1450 {
1451   PetscPartitioner_ParMetis *p;
1452   PetscErrorCode          ierr;
1453 
1454   PetscFunctionBegin;
1455   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1456   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1457   part->data = p;
1458 
1459   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1460   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1461   PetscFunctionReturn(0);
1462 }
1463 
1464 /*@
1465   DMPlexGetPartitioner - Get the mesh partitioner
1466 
1467   Not collective
1468 
1469   Input Parameter:
1470 . dm - The DM
1471 
1472   Output Parameter:
1473 . part - The PetscPartitioner
1474 
1475   Level: developer
1476 
1477   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1478 
1479 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1480 @*/
1481 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1482 {
1483   DM_Plex *mesh = (DM_Plex *) dm->data;
1484 
1485   PetscFunctionBegin;
1486   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1487   PetscValidPointer(part, 2);
1488   *part = mesh->partitioner;
1489   PetscFunctionReturn(0);
1490 }
1491 
1492 /*@
1493   DMPlexSetPartitioner - Set the mesh partitioner
1494 
1495   logically collective on dm and part
1496 
1497   Input Parameters:
1498 + dm - The DM
1499 - part - The partitioner
1500 
1501   Level: developer
1502 
1503   Note: Any existing PetscPartitioner will be destroyed.
1504 
1505 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1506 @*/
1507 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1508 {
1509   DM_Plex       *mesh = (DM_Plex *) dm->data;
1510   PetscErrorCode ierr;
1511 
1512   PetscFunctionBegin;
1513   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1514   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1515   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1516   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1517   mesh->partitioner = part;
1518   PetscFunctionReturn(0);
1519 }
1520 
/* Label, with the given rank, the tree relatives of 'point': when 'up', the
   closure of the point's tree parent (recursing upward only); when 'down', the
   point's tree children (recursing downward only).  The one-directional
   recursion prevents revisiting the starting point's own subtree. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point is its own parent when it has no tree parent; recurse only on a
       genuine parent. */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i];  /* closure stores (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1561 
1562 /*@
1563   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1564 
1565   Input Parameters:
1566 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1568 
1569   Level: developer
1570 
1571 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1572 @*/
1573 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1574 {
1575   IS              rankIS,   pointIS;
1576   const PetscInt *ranks,   *points;
1577   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1578   PetscInt       *closure = NULL;
1579   PetscErrorCode  ierr;
1580 
1581   PetscFunctionBegin;
1582   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1583   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1584   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1585   for (r = 0; r < numRanks; ++r) {
1586     const PetscInt rank = ranks[r];
1587 
1588     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1589     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1590     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1591     for (p = 0; p < numPoints; ++p) {
1592       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1593       for (c = 0; c < closureSize*2; c += 2) {
1594         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1595         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1596       }
1597     }
1598     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1599     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1600   }
1601   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1602   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1603   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1604   PetscFunctionReturn(0);
1605 }
1606 
1607 /*@
1608   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1609 
1610   Input Parameters:
1611 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1613 
1614   Level: developer
1615 
1616 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1617 @*/
1618 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1619 {
1620   IS              rankIS,   pointIS;
1621   const PetscInt *ranks,   *points;
1622   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1623   PetscInt       *adj = NULL;
1624   PetscErrorCode  ierr;
1625 
1626   PetscFunctionBegin;
1627   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1628   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1629   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1630   for (r = 0; r < numRanks; ++r) {
1631     const PetscInt rank = ranks[r];
1632 
1633     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1634     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1635     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1636     for (p = 0; p < numPoints; ++p) {
1637       adjSize = PETSC_DETERMINE;
1638       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1639       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1640     }
1641     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1642     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1643   }
1644   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1645   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1646   ierr = PetscFree(adj);CHKERRQ(ierr);
1647   PetscFunctionReturn(0);
1648 }
1649 
1650 /*@
1651   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1652 
1653   Input Parameters:
1654 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1656 
1657   Level: developer
1658 
1659   Note: This is required when generating multi-level overlaps to capture
1660   overlap points from non-neighbouring partitions.
1661 
1662 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1663 @*/
1664 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1665 {
1666   MPI_Comm        comm;
1667   PetscMPIInt     rank;
1668   PetscSF         sfPoint;
1669   DMLabel         lblRoots, lblLeaves;
1670   IS              rankIS, pointIS;
1671   const PetscInt *ranks;
1672   PetscInt        numRanks, r;
1673   PetscErrorCode  ierr;
1674 
1675   PetscFunctionBegin;
1676   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1677   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1678   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1679   /* Pull point contributions from remote leaves into local roots */
1680   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1681   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1682   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1683   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1684   for (r = 0; r < numRanks; ++r) {
1685     const PetscInt remoteRank = ranks[r];
1686     if (remoteRank == rank) continue;
1687     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1688     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1689     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1690   }
1691   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1692   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1693   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1694   /* Push point contributions from roots into remote leaves */
1695   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1696   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1697   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1698   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1699   for (r = 0; r < numRanks; ++r) {
1700     const PetscInt remoteRank = ranks[r];
1701     if (remoteRank == rank) continue;
1702     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1703     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1704     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1705   }
1706   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1707   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1708   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 /*@
1713   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1714 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1722 
1723   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1724   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1725 
1726   Level: developer
1727 
1728 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1729 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* Build a section over ranks [0, numProcs) counting how many labeled points
     will be sent to each neighboring rank. */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill the send buffer: each entry is the (index, rank) of the point's true
     owner -- its remote root if the point is an SF leaf, otherwise the point
     itself on the local rank. */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local == NULL means leaves are contiguous; PetscFindInt locates the
         point in the sorted leaf list (l < 0 when it is not a leaf). */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Each PetscSFNode is two PetscInts, hence the MPIU_2INT datatype. */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Record every received (point, owner rank) pair in the output label. */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1799 
1800 /*@
1801   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1802 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1809 
1810   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1811 
1812   Level: developer
1813 
1814 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1815 @*/
1816 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1817 {
1818   PetscMPIInt     rank, numProcs;
1819   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1820   PetscSFNode    *remotePoints;
1821   IS              remoteRootIS;
1822   const PetscInt *remoteRoots;
1823   PetscErrorCode ierr;
1824 
1825   PetscFunctionBegin;
1826   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1827   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1828 
1829   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1830     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1831     numRemote += numPoints;
1832   }
1833   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1834   /* Put owned points first */
1835   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1836   if (numPoints > 0) {
1837     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1838     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1839     for (p = 0; p < numPoints; p++) {
1840       remotePoints[idx].index = remoteRoots[p];
1841       remotePoints[idx].rank = rank;
1842       idx++;
1843     }
1844     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1845     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1846   }
1847   /* Now add remote points */
1848   for (n = 0; n < numProcs; ++n) {
1849     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1850     if (numPoints <= 0 || n == rank) continue;
1851     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1852     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1853     for (p = 0; p < numPoints; p++) {
1854       remotePoints[idx].index = remoteRoots[p];
1855       remotePoints[idx].rank = n;
1856       idx++;
1857     }
1858     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1859     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1860   }
1861   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1862   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1863   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1864   PetscFunctionReturn(0);
1865 }
1866