xref: /petsc/src/dm/impls/plex/plexpartition.c (revision d39976ab96a144a1bf348e3889c9ba0a858abe9e)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; 0 until the class is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner implementations, populated by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* BibTeX citation string for the ParMetis partitioner */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionerGraph"
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* rank is obtained here but not referenced below in this function */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 is used below as the indicator that a point SF exists (parallel run with shared points) */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's adjacency
     flags, force cone/no-closure, and restore the flags after the traversal below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      /* Keep only neighbors in the same stratum, excluding the point itself */
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr; /* NOTE: this inner "size" shadows the outer storage-size variable */
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (negative entries encode -(global+1) for ghosts) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
131 
132 #undef __FUNCT__
133 #define __FUNCT__ "DMPlexCreateNeighborCSR"
134 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
135 {
136   const PetscInt maxFaceCases = 30;
137   PetscInt       numFaceCases = 0;
138   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
139   PetscInt      *off, *adj;
140   PetscInt      *neighborCells = NULL;
141   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   /* For parallel partitioning, I think you have to communicate supports */
146   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
147   cellDim = dim - cellHeight;
148   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
149   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
150   if (cEnd - cStart == 0) {
151     if (numVertices) *numVertices = 0;
152     if (offsets)   *offsets   = NULL;
153     if (adjacency) *adjacency = NULL;
154     PetscFunctionReturn(0);
155   }
156   numCells  = cEnd - cStart;
157   faceDepth = depth - cellHeight;
158   if (dim == depth) {
159     PetscInt f, fStart, fEnd;
160 
161     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
162     /* Count neighboring cells */
163     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
164     for (f = fStart; f < fEnd; ++f) {
165       const PetscInt *support;
166       PetscInt        supportSize;
167       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
168       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
169       if (supportSize == 2) {
170         PetscInt numChildren;
171 
172         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
173         if (!numChildren) {
174           ++off[support[0]-cStart+1];
175           ++off[support[1]-cStart+1];
176         }
177       }
178     }
179     /* Prefix sum */
180     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
181     if (adjacency) {
182       PetscInt *tmp;
183 
184       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
185       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
186       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
187       /* Get neighboring cells */
188       for (f = fStart; f < fEnd; ++f) {
189         const PetscInt *support;
190         PetscInt        supportSize;
191         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
192         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
193         if (supportSize == 2) {
194           PetscInt numChildren;
195 
196           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
197           if (!numChildren) {
198             adj[tmp[support[0]-cStart]++] = support[1];
199             adj[tmp[support[1]-cStart]++] = support[0];
200           }
201         }
202       }
203 #if defined(PETSC_USE_DEBUG)
204       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
205 #endif
206       ierr = PetscFree(tmp);CHKERRQ(ierr);
207     }
208     if (numVertices) *numVertices = numCells;
209     if (offsets)   *offsets   = off;
210     if (adjacency) *adjacency = adj;
211     PetscFunctionReturn(0);
212   }
213   /* Setup face recognition */
214   if (faceDepth == 1) {
215     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
216 
217     for (c = cStart; c < cEnd; ++c) {
218       PetscInt corners;
219 
220       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
221       if (!cornersSeen[corners]) {
222         PetscInt nFV;
223 
224         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
225         cornersSeen[corners] = 1;
226 
227         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
228 
229         numFaceVertices[numFaceCases++] = nFV;
230       }
231     }
232   }
233   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
234   /* Count neighboring cells */
235   for (cell = cStart; cell < cEnd; ++cell) {
236     PetscInt numNeighbors = PETSC_DETERMINE, n;
237 
238     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
239     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
240     for (n = 0; n < numNeighbors; ++n) {
241       PetscInt        cellPair[2];
242       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
243       PetscInt        meetSize = 0;
244       const PetscInt *meet    = NULL;
245 
246       cellPair[0] = cell; cellPair[1] = neighborCells[n];
247       if (cellPair[0] == cellPair[1]) continue;
248       if (!found) {
249         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
250         if (meetSize) {
251           PetscInt f;
252 
253           for (f = 0; f < numFaceCases; ++f) {
254             if (numFaceVertices[f] == meetSize) {
255               found = PETSC_TRUE;
256               break;
257             }
258           }
259         }
260         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
261       }
262       if (found) ++off[cell-cStart+1];
263     }
264   }
265   /* Prefix sum */
266   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
267 
268   if (adjacency) {
269     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
270     /* Get neighboring cells */
271     for (cell = cStart; cell < cEnd; ++cell) {
272       PetscInt numNeighbors = PETSC_DETERMINE, n;
273       PetscInt cellOffset   = 0;
274 
275       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
276       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
277       for (n = 0; n < numNeighbors; ++n) {
278         PetscInt        cellPair[2];
279         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
280         PetscInt        meetSize = 0;
281         const PetscInt *meet    = NULL;
282 
283         cellPair[0] = cell; cellPair[1] = neighborCells[n];
284         if (cellPair[0] == cellPair[1]) continue;
285         if (!found) {
286           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
287           if (meetSize) {
288             PetscInt f;
289 
290             for (f = 0; f < numFaceCases; ++f) {
291               if (numFaceVertices[f] == meetSize) {
292                 found = PETSC_TRUE;
293                 break;
294               }
295             }
296           }
297           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
298         }
299         if (found) {
300           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
301           ++cellOffset;
302         }
303       }
304     }
305   }
306   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
307   if (numVertices) *numVertices = numCells;
308   if (offsets)   *offsets   = off;
309   if (adjacency) *adjacency = adj;
310   PetscFunctionReturn(0);
311 }
312 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Delegate to the generic function-list registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "PetscPartitionerSetType"
359 /*@C
360   PetscPartitionerSetType - Builds a particular PetscPartitioner
361 
362   Collective on PetscPartitioner
363 
364   Input Parameters:
365 + part - The PetscPartitioner object
366 - name - The kind of partitioner
367 
368   Options Database Key:
369 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
370 
371   Level: intermediate
372 
373 .keywords: PetscPartitioner, set, type
374 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
375 @*/
376 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
377 {
378   PetscErrorCode (*r)(PetscPartitioner);
379   PetscBool      match;
380   PetscErrorCode ierr;
381 
382   PetscFunctionBegin;
383   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
384   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
385   if (match) PetscFunctionReturn(0);
386 
387   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
388   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
389   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
390 
391   if (part->ops->destroy) {
392     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
393     part->ops->destroy = NULL;
394   }
395   ierr = (*r)(part);CHKERRQ(ierr);
396   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
397   PetscFunctionReturn(0);
398 }
399 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Make sure the registry exists so the returned name is a registered type */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
429 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's stdout ASCII viewer when none is given */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
455 
456 #undef __FUNCT__
457 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
458 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
459 {
460   const char    *defaultType;
461   char           name[256];
462   PetscBool      flg;
463   PetscErrorCode ierr;
464 
465   PetscFunctionBegin;
466   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
467   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
468   else                                  defaultType = ((PetscObject) part)->type_name;
469   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
470 
471   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
472   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
473   if (flg) {
474     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
475   } else if (!((PetscObject) part)->type_name) {
476     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
477   }
478   ierr = PetscOptionsEnd();CHKERRQ(ierr);
479   PetscFunctionReturn(0);
480 }
481 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Set the type first so the type-specific setfromoptions hook below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscOptionsObject used below is declared by the PetscObjectOptionsBegin() macro */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
512 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is optional: implementations without a setup hook are a no-op */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
536 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy"
/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* If other references remain, just drop ours and clear the caller's pointer */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  /* Give the implementation a chance to free its data, then free the header */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "PetscPartitionerCreate"
569 /*@
570   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
571 
572   Collective on MPI_Comm
573 
574   Input Parameter:
575 . comm - The communicator for the PetscPartitioner object
576 
577   Output Parameter:
578 . part - The PetscPartitioner object
579 
580   Level: beginner
581 
582 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
583 @*/
584 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
585 {
586   PetscPartitioner p;
587   PetscErrorCode   ierr;
588 
589   PetscFunctionBegin;
590   PetscValidPointer(part, 2);
591   *part = NULL;
592   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
593 
594   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
595 
596   *part = p;
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "PetscPartitionerPartition"
602 /*@
603   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
604 
605   Collective on DM
606 
607   Input Parameters:
608 + part    - The PetscPartitioner
609 - dm      - The mesh DM
610 
611   Output Parameters:
612 + partSection     - The PetscSection giving the division of points by partition
613 - partition       - The list of points by partition
614 
615   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
616 
617   Level: developer
618 
619 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
620 @*/
621 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
622 {
623   PetscMPIInt    size;
624   PetscErrorCode ierr;
625 
626   PetscFunctionBegin;
627   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
628   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
629   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
630   PetscValidPointer(partition, 5);
631   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
632   if (size == 1) {
633     PetscInt *points;
634     PetscInt  cStart, cEnd, c;
635 
636     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
637     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
638     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
639     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
640     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
641     for (c = cStart; c < cEnd; ++c) points[c] = c;
642     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
643     PetscFunctionReturn(0);
644   }
645   if (part->height == 0) {
646     PetscInt  numVertices;
647     PetscInt *start     = NULL;
648     PetscInt *adjacency = NULL;
649 
650     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
651     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
652     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
653     ierr = PetscFree(start);CHKERRQ(ierr);
654     ierr = PetscFree(adjacency);CHKERRQ(ierr);
655   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
656   PetscFunctionReturn(0);
657 }
658 
659 #undef __FUNCT__
660 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
661 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
662 {
663   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
664   PetscErrorCode          ierr;
665 
666   PetscFunctionBegin;
667   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
668   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
669   ierr = PetscFree(p);CHKERRQ(ierr);
670   PetscFunctionReturn(0);
671 }
672 
673 #undef __FUNCT__
674 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
675 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
676 {
677   PetscViewerFormat format;
678   PetscErrorCode    ierr;
679 
680   PetscFunctionBegin;
681   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
682   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
683   PetscFunctionReturn(0);
684 }
685 
686 #undef __FUNCT__
687 #define __FUNCT__ "PetscPartitionerView_Shell"
688 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
689 {
690   PetscBool      iascii;
691   PetscErrorCode ierr;
692 
693   PetscFunctionBegin;
694   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
695   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
696   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
697   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
698   PetscFunctionReturn(0);
699 }
700 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Shell"
/* Return the user-supplied partition stored by PetscPartitionerShellSetPartition(),
   after checking it matches the requested partition and vertex counts.
   NOTE(review): if no partition was set, p->section is NULL and PetscSectionGetChart()
   will fail — confirm callers always call PetscPartitionerShellSetPartition() first. */
PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                np;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
  if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
  ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
  if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
  ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
  /* Hand back an extra reference to the stored IS; the caller owns the returned reference */
  *partition = p->partition;
  ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
719 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Shell"
/* Install the Shell implementation's virtual function table */
PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Shell;
  part->ops->destroy   = PetscPartitionerDestroy_Shell;
  part->ops->partition = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
730 
/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner implementation that returns a partition supplied by the user via PetscPartitionerShellSetPartition()

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType(), PetscPartitionerShellSetPartition()
M*/
738 
739 #undef __FUNCT__
740 #define __FUNCT__ "PetscPartitionerCreate_Shell"
741 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
742 {
743   PetscPartitioner_Shell *p;
744   PetscErrorCode          ierr;
745 
746   PetscFunctionBegin;
747   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
748   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
749   part->data = p;
750 
751   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
752   PetscFunctionReturn(0);
753 }
754 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* points is only required when sizes is given, per the contract above */
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Replace any previously stored partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  /* PETSC_COPY_VALUES: the caller's points array may be freed afterwards */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
800 
801 #undef __FUNCT__
802 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
803 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
804 {
805   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
806   PetscErrorCode          ierr;
807 
808   PetscFunctionBegin;
809   ierr = PetscFree(p);CHKERRQ(ierr);
810   PetscFunctionReturn(0);
811 }
812 
813 #undef __FUNCT__
814 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
815 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
816 {
817   PetscViewerFormat format;
818   PetscErrorCode    ierr;
819 
820   PetscFunctionBegin;
821   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
822   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
823   PetscFunctionReturn(0);
824 }
825 
826 #undef __FUNCT__
827 #define __FUNCT__ "PetscPartitionerView_Simple"
828 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
829 {
830   PetscBool      iascii;
831   PetscErrorCode ierr;
832 
833   PetscFunctionBegin;
834   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
835   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
836   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
837   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
838   PetscFunctionReturn(0);
839 }
840 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Simple"
/*
  PetscPartitionerPartition_Simple - Split the global vertex numbering into nparts
  contiguous, nearly equal chunks; the graph adjacency (start/adjacency) is ignored.

  On exit, partSection holds, for each part, the number of LOCAL vertices assigned
  to it, and partition is the identity ordering of the local vertices.
*/
PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local points keep their order: the IS is the identity on [0, numVertices) */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: part np gets floor(n/nparts) vertices, plus one extra for the first n%nparts parts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global index range [myFirst, myLast] of this rank's vertices
       by prefix-summing the per-rank vertex counts */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" parts holding one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small part */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large part */

      /* Locate the range of parts [firstPart, lastPart] overlapping this rank's
         vertex range; the rem large parts come first in the global numbering */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst lies past the large parts: offset into the small parts */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All parts have equal size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each overlapping part, count how many of this rank's vertices fall inside
         the intersection of the part's global range and [myFirst, myLast] */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
916 
917 #undef __FUNCT__
918 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
919 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
920 {
921   PetscFunctionBegin;
922   part->ops->view      = PetscPartitionerView_Simple;
923   part->ops->destroy   = PetscPartitionerDestroy_Simple;
924   part->ops->partition = PetscPartitionerPartition_Simple;
925   PetscFunctionReturn(0);
926 }
927 
928 /*MC
929   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
930 
931   Level: intermediate
932 
933 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
934 M*/
935 
936 #undef __FUNCT__
937 #define __FUNCT__ "PetscPartitionerCreate_Simple"
938 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
939 {
940   PetscPartitioner_Simple *p;
941   PetscErrorCode           ierr;
942 
943   PetscFunctionBegin;
944   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
945   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
946   part->data = p;
947 
948   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
949   PetscFunctionReturn(0);
950 }
951 
952 #undef __FUNCT__
953 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
954 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
955 {
956   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
957   PetscErrorCode          ierr;
958 
959   PetscFunctionBegin;
960   ierr = PetscFree(p);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
964 #undef __FUNCT__
965 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
966 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
967 {
968   PetscViewerFormat format;
969   PetscErrorCode    ierr;
970 
971   PetscFunctionBegin;
972   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
973   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
977 #undef __FUNCT__
978 #define __FUNCT__ "PetscPartitionerView_Gather"
979 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
980 {
981   PetscBool      iascii;
982   PetscErrorCode ierr;
983 
984   PetscFunctionBegin;
985   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
986   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
987   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
988   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
989   PetscFunctionReturn(0);
990 }
991 
992 #undef __FUNCT__
993 #define __FUNCT__ "PetscPartitionerPartition_Gather"
994 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
995 {
996   PetscInt       np;
997   PetscErrorCode ierr;
998 
999   PetscFunctionBegin;
1000   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1001   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1002   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1003   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1004   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1005   PetscFunctionReturn(0);
1006 }
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "PetscPartitionerInitialize_Gather"
1010 PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1011 {
1012   PetscFunctionBegin;
1013   part->ops->view      = PetscPartitionerView_Gather;
1014   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1015   part->ops->partition = PetscPartitionerPartition_Gather;
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 /*MC
1020   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1021 
1022   Level: intermediate
1023 
1024 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1025 M*/
1026 
1027 #undef __FUNCT__
1028 #define __FUNCT__ "PetscPartitionerCreate_Gather"
1029 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1030 {
1031   PetscPartitioner_Gather *p;
1032   PetscErrorCode           ierr;
1033 
1034   PetscFunctionBegin;
1035   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1036   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1037   part->data = p;
1038 
1039   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1040   PetscFunctionReturn(0);
1041 }
1042 
1043 
1044 #undef __FUNCT__
1045 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1046 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1047 {
1048   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1049   PetscErrorCode          ierr;
1050 
1051   PetscFunctionBegin;
1052   ierr = PetscFree(p);CHKERRQ(ierr);
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 #undef __FUNCT__
1057 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1058 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1059 {
1060   PetscViewerFormat format;
1061   PetscErrorCode    ierr;
1062 
1063   PetscFunctionBegin;
1064   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1065   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1066   PetscFunctionReturn(0);
1067 }
1068 
1069 #undef __FUNCT__
1070 #define __FUNCT__ "PetscPartitionerView_Chaco"
1071 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1072 {
1073   PetscBool      iascii;
1074   PetscErrorCode ierr;
1075 
1076   PetscFunctionBegin;
1077   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1078   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1079   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1080   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1081   PetscFunctionReturn(0);
1082 }
1083 
1084 #if defined(PETSC_HAVE_CHACO)
1085 #if defined(PETSC_HAVE_UNISTD_H)
1086 #include <unistd.h>
1087 #endif
1088 /* Chaco does not have an include file */
1089 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1090                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1091                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1092                        int mesh_dims[3], double *goal, int global_method, int local_method,
1093                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1094 
1095 extern int FREE_GRAPH;
1096 #endif
1097 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  PetscPartitionerPartition_Chaco - Partition the local graph (CSR start/adjacency)
  into nparts pieces using the Chaco library, returning the result as the
  partSection (points per part) and partition (points grouped by part) pair.
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: still set up the section and return an empty partition */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift the adjacency lists to 1-based numbering for Chaco; undone below */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* Save stdout, then point fd 1 at the pipe's write end */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* NOTE(review): the interface() return code is only checked in the
     PETSC_HAVE_UNISTD_H branch below -- confirm whether the non-unistd build
     should also test it */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's output from the pipe, then restore the original stdout */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* points lists the vertices grouped by the part they were assigned to */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency lists */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1206 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1207 {
1208   PetscFunctionBegin;
1209   part->ops->view      = PetscPartitionerView_Chaco;
1210   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1211   part->ops->partition = PetscPartitionerPartition_Chaco;
1212   PetscFunctionReturn(0);
1213 }
1214 
1215 /*MC
1216   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1217 
1218   Level: intermediate
1219 
1220 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1221 M*/
1222 
1223 #undef __FUNCT__
1224 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1225 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1226 {
1227   PetscPartitioner_Chaco *p;
1228   PetscErrorCode          ierr;
1229 
1230   PetscFunctionBegin;
1231   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1232   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1233   part->data = p;
1234 
1235   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1236   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1237   PetscFunctionReturn(0);
1238 }
1239 
1240 #undef __FUNCT__
1241 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1242 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1243 {
1244   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1245   PetscErrorCode             ierr;
1246 
1247   PetscFunctionBegin;
1248   ierr = PetscFree(p);CHKERRQ(ierr);
1249   PetscFunctionReturn(0);
1250 }
1251 
1252 #undef __FUNCT__
1253 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1254 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1255 {
1256   PetscViewerFormat format;
1257   PetscErrorCode    ierr;
1258 
1259   PetscFunctionBegin;
1260   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1261   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1262   PetscFunctionReturn(0);
1263 }
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1267 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1268 {
1269   PetscBool      iascii;
1270   PetscErrorCode ierr;
1271 
1272   PetscFunctionBegin;
1273   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1274   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1275   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1276   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #if defined(PETSC_HAVE_PARMETIS)
1281 #include <parmetis.h>
1282 #endif
1283 
1284 #undef __FUNCT__
1285 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1286 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1287 {
1288 #if defined(PETSC_HAVE_PARMETIS)
1289   MPI_Comm       comm;
1290   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1291   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1292   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1293   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1294   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1295   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1296   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1297   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1298   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1299   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1300   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1301   PetscInt       options[5];               /* Options */
1302   /* Outputs */
1303   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1304   PetscInt      *assignment, *points;
1305   PetscMPIInt    rank, p, v, i;
1306   PetscErrorCode ierr;
1307   PetscInt       cstart, cend, dof;
1308   PetscSection   dfsection;
1309 
1310   PetscFunctionBegin;
1311   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1312   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1313   options[0] = 0; /* Use all defaults */
1314   /* Calculate vertex distribution */
1315   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1316   vtxdist[0] = 0;
1317   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1318   for (p = 2; p <= nparts; ++p) {
1319     vtxdist[p] += vtxdist[p-1];
1320   }
1321   /* Calculate weights */
1322   for (p = 0; p < nparts; ++p) {
1323     tpwgts[p] = 1.0/nparts;
1324   }
1325   ubvec[0] = 1.05;
1326 
1327   ierr = DMPlexGetHeightStratum(dm,0,&cstart,&cend);CHKERRQ(ierr);
1328   ierr = DMGetDefaultSection(dm,&dfsection);CHKERRQ(ierr);
1329   for(i= cstart; i < cend; i++) {
1330     ierr = PetscSectionGetDof(dfsection,i,&dof);CHKERRQ(ierr);
1331     vwgt[i-cstart] = (dof == 0) ? 1 : dof;
1332   }
1333 
1334   if (nparts == 1) {
1335     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1336   } else {
1337     if (vtxdist[1] == vtxdist[nparts]) {
1338       if (!rank) {
1339         PetscStackPush("METIS_PartGraphKway");
1340         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1341         PetscStackPop;
1342         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1343       }
1344     } else {
1345       PetscStackPush("ParMETIS_V3_PartKway");
1346       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1347       PetscStackPop;
1348       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1349     }
1350   }
1351   /* Convert to PetscSection+IS */
1352   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1353   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1354   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1355   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1356   for (p = 0, i = 0; p < nparts; ++p) {
1357     for (v = 0; v < nvtxs; ++v) {
1358       if (assignment[v] == p) points[i++] = v;
1359     }
1360   }
1361   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1362   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1363   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1364   PetscFunctionReturn(0);
1365 #else
1366   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1367 #endif
1368 }
1369 
1370 #undef __FUNCT__
1371 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1372 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1373 {
1374   PetscFunctionBegin;
1375   part->ops->view      = PetscPartitionerView_ParMetis;
1376   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1377   part->ops->partition = PetscPartitionerPartition_ParMetis;
1378   PetscFunctionReturn(0);
1379 }
1380 
1381 /*MC
1382   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1383 
1384   Level: intermediate
1385 
1386 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1387 M*/
1388 
1389 #undef __FUNCT__
1390 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1391 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1392 {
1393   PetscPartitioner_ParMetis *p;
1394   PetscErrorCode          ierr;
1395 
1396   PetscFunctionBegin;
1397   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1398   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1399   part->data = p;
1400 
1401   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1402   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1403   PetscFunctionReturn(0);
1404 }
1405 
1406 #undef __FUNCT__
1407 #define __FUNCT__ "DMPlexGetPartitioner"
1408 /*@
1409   DMPlexGetPartitioner - Get the mesh partitioner
1410 
1411   Not collective
1412 
1413   Input Parameter:
1414 . dm - The DM
1415 
1416   Output Parameter:
1417 . part - The PetscPartitioner
1418 
1419   Level: developer
1420 
1421   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1422 
1423 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1424 @*/
1425 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1426 {
1427   DM_Plex *mesh = (DM_Plex *) dm->data;
1428 
1429   PetscFunctionBegin;
1430   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1431   PetscValidPointer(part, 2);
1432   *part = mesh->partitioner;
1433   PetscFunctionReturn(0);
1434 }
1435 
1436 #undef __FUNCT__
1437 #define __FUNCT__ "DMPlexSetPartitioner"
1438 /*@
1439   DMPlexSetPartitioner - Set the mesh partitioner
1440 
1441   logically collective on dm and part
1442 
1443   Input Parameters:
1444 + dm - The DM
1445 - part - The partitioner
1446 
1447   Level: developer
1448 
1449   Note: Any existing PetscPartitioner will be destroyed.
1450 
1451 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1452 @*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Reference BEFORE destroying the old one, so that installing the partitioner
     that is already set does not destroy it */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
1466 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/*
  DMPlexPartitionLabelClosure_Tree - Recursively add the tree relatives of a point
  to the partition label for the given rank.

  With up == PETSC_TRUE, the closure of the point's tree parent is labeled and the
  walk continues upward; with down == PETSC_TRUE, the point's tree children are
  labeled and the walk continues downward.
*/
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    /* A point is its own parent when it has none; only recurse on real parents */
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure entries are (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue upward only, to avoid revisiting this subtree */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue downward only */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1509 
1510 #undef __FUNCT__
1511 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1512 /*@
1513   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1514 
1515   Input Parameters:
1516 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1518 
1519   Level: developer
1520 
1521 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1522 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For every rank stratum in the label, label the transitive closure (and tree
     relatives) of each of its points with that rank */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Passing the same closure pointer reuses the work array between calls */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* entries are (point, orientation) pairs */
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* The reused work array is returned once, after all closures have been taken */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1556 
1557 #undef __FUNCT__
1558 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1559 /*@
1560   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1561 
1562   Input Parameters:
1563 + dm     - The DM
1564 - label  - DMLabel assinging ranks to remote roots
1565 
1566   Level: developer
1567 
1568 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1569 @*/
1570 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1571 {
1572   IS              rankIS,   pointIS;
1573   const PetscInt *ranks,   *points;
1574   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1575   PetscInt       *adj = NULL;
1576   PetscErrorCode  ierr;
1577 
1578   PetscFunctionBegin;
1579   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1580   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1581   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1582   for (r = 0; r < numRanks; ++r) {
1583     const PetscInt rank = ranks[r];
1584 
1585     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1586     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1587     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1588     for (p = 0; p < numPoints; ++p) {
1589       adjSize = PETSC_DETERMINE;
1590       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1591       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1592     }
1593     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1594     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1595   }
1596   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1597   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1598   ierr = PetscFree(adj);CHKERRQ(ierr);
1599   PetscFunctionReturn(0);
1600 }
1601 
1602 #undef __FUNCT__
1603 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1604 /*@
1605   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1606 
1607   Input Parameters:
1608 + dm     - The DM
1609 - label  - DMLabel assinging ranks to remote roots
1610 
1611   Level: developer
1612 
1613   Note: This is required when generating multi-level overlaps to capture
1614   overlap points from non-neighbouring partitions.
1615 
1616 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1617 @*/
1618 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1619 {
1620   MPI_Comm        comm;
1621   PetscMPIInt     rank;
1622   PetscSF         sfPoint;
1623   DMLabel         lblRoots, lblLeaves;
1624   IS              rankIS, pointIS;
1625   const PetscInt *ranks;
1626   PetscInt        numRanks, r;
1627   PetscErrorCode  ierr;
1628 
1629   PetscFunctionBegin;
1630   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1631   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1632   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1633   /* Pull point contributions from remote leaves into local roots */
1634   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1635   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1636   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1637   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1638   for (r = 0; r < numRanks; ++r) {
1639     const PetscInt remoteRank = ranks[r];
1640     if (remoteRank == rank) continue;
1641     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1642     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1643     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1644   }
1645   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1646   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1647   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1648   /* Push point contributions from roots into remote leaves */
1649   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1650   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1651   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1652   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1653   for (r = 0; r < numRanks; ++r) {
1654     const PetscInt remoteRank = ranks[r];
1655     if (remoteRank == rank) continue;
1656     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1657     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1658     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1659   }
1660   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1661   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1662   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1663   PetscFunctionReturn(0);
1664 }
1665 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelInvert"
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is indexed by target rank: dof(r) = number of points destined for rank r */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Pack the (index, rank) pairs to send, translating shared points to their true owners */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p; /* NOTE: shadows the outer p; used only in this scope */

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF, record its owning (rank, index) from the
         SF graph; otherwise the point is locally owned and sent as (point, rank) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Insert the received (point, rank) pairs into the output leaf label */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1755 
1756 #undef __FUNCT__
1757 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1758 /*@
1759   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1760 
1761   Input Parameters:
1762 + dm    - The DM
1763 . label - DMLabel assinging ranks to remote roots
1764 
1765   Output Parameter:
1766 - sf    - The star forest communication context encapsulating the defined mapping
1767 
1768   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1769 
1770   Level: developer
1771 
1772 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1773 @*/
1774 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1775 {
1776   PetscMPIInt     rank, numProcs;
1777   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1778   PetscSFNode    *remotePoints;
1779   IS              remoteRootIS;
1780   const PetscInt *remoteRoots;
1781   PetscErrorCode ierr;
1782 
1783   PetscFunctionBegin;
1784   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1785   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1786 
1787   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1788     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1789     numRemote += numPoints;
1790   }
1791   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1792   /* Put owned points first */
1793   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1794   if (numPoints > 0) {
1795     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1796     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1797     for (p = 0; p < numPoints; p++) {
1798       remotePoints[idx].index = remoteRoots[p];
1799       remotePoints[idx].rank = rank;
1800       idx++;
1801     }
1802     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1803     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1804   }
1805   /* Now add remote points */
1806   for (n = 0; n < numProcs; ++n) {
1807     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1808     if (numPoints <= 0 || n == rank) continue;
1809     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1810     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1811     for (p = 0; p < numPoints; p++) {
1812       remotePoints[idx].index = remoteRoots[p];
1813       remotePoints[idx].rank = n;
1814       idx++;
1815     }
1816     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1817     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1818   }
1819   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1820   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1821   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1822   PetscFunctionReturn(0);
1823 }
1824