xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 1917a363a0108e5a01ea00f477c9a5d39d8d3ccc)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
33 + dm      - The mesh DM dm
34 - height  - Height of the strata from which to construct the graph
35 
36   Output Parameter:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
42   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
43   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given name; PetscPartitionerSetType() looks it up later */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name; the string is owned by the object, do not free it

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Make sure built-in implementations are registered before reporting a type */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer; if NULL, the default ASCII viewer on the object's communicator is used

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to stdout on the object's communicator when no viewer is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the type-specific view routine, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
470 {
471   const char    *defaultType;
472   char           name[256];
473   PetscBool      flg;
474   PetscErrorCode ierr;
475 
476   PetscFunctionBegin;
477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
478   if (!((PetscObject) part)->type_name)
479 #if defined(PETSC_HAVE_CHACO)
480     defaultType = PETSCPARTITIONERCHACO;
481 #elif defined(PETSC_HAVE_PARMETIS)
482     defaultType = PETSCPARTITIONERPARMETIS;
483 #else
484     defaultType = PETSCPARTITIONERSIMPLE;
485 #endif
486   else
487     defaultType = ((PetscObject) part)->type_name;
488   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
489 
490   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
491   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
492   if (flg) {
493     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
494   } else if (!((PetscObject) part)->type_name) {
495     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
496   }
497   ierr = PetscOptionsEnd();CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin declares the PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
529 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup routine; a no-op if the type provides none */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
551 
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 /*@
581   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
582 
583   Collective on MPI_Comm
584 
585   Input Parameter:
586 . comm - The communicator for the PetscPartitioner object
587 
588   Output Parameter:
589 . part - The PetscPartitioner object
590 
591   Level: beginner
592 
593 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
594 @*/
595 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
596 {
597   PetscPartitioner p;
598   PetscErrorCode   ierr;
599 
600   PetscFunctionBegin;
601   PetscValidPointer(part, 2);
602   *part = NULL;
603   ierr = DMInitializePackage();CHKERRQ(ierr);
604 
605   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
606 
607   *part = p;
608   PetscFunctionReturn(0);
609 }
610 
611 /*@
612   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613 
614   Collective on DM
615 
616   Input Parameters:
617 + part    - The PetscPartitioner
618 - dm      - The mesh DM
619 
620   Output Parameters:
621 + partSection     - The PetscSection giving the division of points by partition
622 - partition       - The list of points by partition
623 
624   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625 
626   Level: developer
627 
628 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
629 @*/
630 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
631 {
632   PetscMPIInt    size;
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
637   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
638   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
639   PetscValidPointer(partition, 5);
640   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
641   if (size == 1) {
642     PetscInt *points;
643     PetscInt  cStart, cEnd, c;
644 
645     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
646     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
647     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
648     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
649     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
650     for (c = cStart; c < cEnd; ++c) points[c] = c;
651     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
652     PetscFunctionReturn(0);
653   }
654   if (part->height == 0) {
655     PetscInt numVertices;
656     PetscInt *start     = NULL;
657     PetscInt *adjacency = NULL;
658     IS       globalNumbering;
659 
660     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
661     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
662     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
663     ierr = PetscFree(start);CHKERRQ(ierr);
664     ierr = PetscFree(adjacency);CHKERRQ(ierr);
665     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
666       const PetscInt *globalNum;
667       const PetscInt *partIdx;
668       PetscInt *map, cStart, cEnd;
669       PetscInt *adjusted, i, localSize, offset;
670       IS    newPartition;
671 
672       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
673       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
674       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
675       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
676       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
677       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
678       for (i = cStart, offset = 0; i < cEnd; i++) {
679         if (globalNum[i - cStart] >= 0) map[offset++] = i;
680       }
681       for (i = 0; i < localSize; i++) {
682         adjusted[i] = map[partIdx[i]];
683       }
684       ierr = PetscFree(map);CHKERRQ(ierr);
685       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
686       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
687       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
688       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
689       ierr = ISDestroy(partition);CHKERRQ(ierr);
690       *partition = newPartition;
691     }
692   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
693   PetscFunctionReturn(0);
694 
695 }
696 
/* Release the shell partitioner's preset section/partition and its implementation struct */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
708 
709 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
710 {
711   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
712   PetscViewerFormat       format;
713   PetscErrorCode          ierr;
714 
715   PetscFunctionBegin;
716   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
717   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
718   if (p->random) {
719     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
720     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
721     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
722   }
723   PetscFunctionReturn(0);
724 }
725 
/* Dispatch viewing of a shell partitioner; only ASCII viewers are supported, others are ignored */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
738 
/* Process options for the shell partitioner; currently only the random-partition toggle */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
750 
751 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
752 {
753   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
754   PetscInt                np;
755   PetscErrorCode          ierr;
756 
757   PetscFunctionBegin;
758   if (p->random) {
759     PetscRandom r;
760     PetscInt   *sizes, *points, v;
761 
762     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
763     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (nparts+1));CHKERRQ(ierr);
764     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
765     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
766     for (v = 0; v < numVertices; ++v) {
767       PetscReal val;
768       PetscInt  part;
769 
770       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
771       part = PetscFloorReal(val);
772       points[v] = part;
773       ++sizes[part];
774     }
775     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
776     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
777     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
778   }
779   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
780   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
781   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
782   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
783   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
784   *partition = p->partition;
785   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
786   PetscFunctionReturn(0);
787 }
788 
789 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
790 {
791   PetscFunctionBegin;
792   part->ops->view           = PetscPartitionerView_Shell;
793   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
794   part->ops->destroy        = PetscPartitionerDestroy_Shell;
795   part->ops->partition      = PetscPartitionerPartition_Shell;
796   PetscFunctionReturn(0);
797 }
798 
799 /*MC
800   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
801 
802   Level: intermediate
803 
804 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
805 M*/
806 
807 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
808 {
809   PetscPartitioner_Shell *p;
810   PetscErrorCode          ierr;
811 
812   PetscFunctionBegin;
813   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
814   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
815   part->data = p;
816 
817   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
818   p->random = PETSC_FALSE;
819   PetscFunctionReturn(0);
820 }
821 
822 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
824 
825   Collective on Part
826 
827   Input Parameters:
828 + part     - The PetscPartitioner
829 . size - The number of partitions
830 . sizes    - array of size size (or NULL) providing the number of points in each partition
831 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
832 
833   Level: developer
834 
835   Notes:
836 
837     It is safe to free the sizes and points arrays after use in this routine.
838 
839 .seealso DMPlexDistribute(), PetscPartitionerCreate()
840 @*/
841 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
842 {
843   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
844   PetscInt                proc, numPoints;
845   PetscErrorCode          ierr;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
849   if (sizes) {PetscValidPointer(sizes, 3);}
850   if (sizes) {PetscValidPointer(points, 4);}
851   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
852   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
853   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
854   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
855   if (sizes) {
856     for (proc = 0; proc < size; ++proc) {
857       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
858     }
859   }
860   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
861   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
862   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
865 
866 /*@
867   PetscPartitionerShellSetRandom - Set the flag to use a random partition
868 
869   Collective on Part
870 
871   Input Parameters:
872 + part   - The PetscPartitioner
873 - random - The flag to use a random partition
874 
875   Level: intermediate
876 
877 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
878 @*/
879 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
880 {
881   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
882 
883   PetscFunctionBegin;
884   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
885   p->random = random;
886   PetscFunctionReturn(0);
887 }
888 
889 /*@
890   PetscPartitionerShellGetRandom - get the flag to use a random partition
891 
892   Collective on Part
893 
894   Input Parameter:
895 . part   - The PetscPartitioner
896 
897   Output Parameter
898 . random - The flag to use a random partition
899 
900   Level: intermediate
901 
902 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
903 @*/
904 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
905 {
906   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
907 
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
910   PetscValidPointer(random, 2);
911   *random = p->random;
912   PetscFunctionReturn(0);
913 }
914 
915 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
916 {
917   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
918   PetscErrorCode          ierr;
919 
920   PetscFunctionBegin;
921   ierr = PetscFree(p);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
926 {
927   PetscViewerFormat format;
928   PetscErrorCode    ierr;
929 
930   PetscFunctionBegin;
931   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
932   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
933   PetscFunctionReturn(0);
934 }
935 
936 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
937 {
938   PetscBool      iascii;
939   PetscErrorCode ierr;
940 
941   PetscFunctionBegin;
942   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
943   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
944   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
945   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
946   PetscFunctionReturn(0);
947 }
948 
/*
  PetscPartitionerPartition_Simple - Split the global vertex range into nparts
  nearly-equal contiguous chunks, ignoring the graph topology entirely.

  Serial: partition np gets numVertices/nparts vertices, with the first
  (numVertices % nparts) partitions receiving one extra.

  Parallel: each rank owns a contiguous slice [myFirst, myLast] of the global
  vertex numbering.  The rank determines which global partitions overlap its
  slice and records each overlap size in partSection.  The returned IS is the
  identity map on the local vertices (vertex v stays at local position v).

  NOTE(review): when nparts > nvGlobal the divisors nvGlobal/nparts below are
  zero and the divisions would fault — callers appear to guarantee
  nvGlobal >= nparts; confirm before relying on this path.
*/
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The IS is the identity: local vertex v is listed at position v */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: even split, remainder spread over the first partitions */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global prefix sum of local vertex counts to find this
       rank's contiguous slice [myFirst, myLast] of the global numbering */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Global partitions come in two sizes: the first 'rem' partitions hold
         pBig vertices, the remaining ones hold pSmall vertices */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the partitions containing myFirst and myLast: first assume
           they fall in the large-partition prefix, else adjust into the
           small-partition suffix */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record, for every partition overlapping my slice, how many of my
         vertices fall inside it */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1022 
1023 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1024 {
1025   PetscFunctionBegin;
1026   part->ops->view      = PetscPartitionerView_Simple;
1027   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1028   part->ops->partition = PetscPartitionerPartition_Simple;
1029   PetscFunctionReturn(0);
1030 }
1031 
1032 /*MC
1033   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1034 
1035   Level: intermediate
1036 
1037 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1038 M*/
1039 
1040 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1041 {
1042   PetscPartitioner_Simple *p;
1043   PetscErrorCode           ierr;
1044 
1045   PetscFunctionBegin;
1046   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1047   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1048   part->data = p;
1049 
1050   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1055 {
1056   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1057   PetscErrorCode          ierr;
1058 
1059   PetscFunctionBegin;
1060   ierr = PetscFree(p);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1065 {
1066   PetscViewerFormat format;
1067   PetscErrorCode    ierr;
1068 
1069   PetscFunctionBegin;
1070   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1071   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1076 {
1077   PetscBool      iascii;
1078   PetscErrorCode ierr;
1079 
1080   PetscFunctionBegin;
1081   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1082   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1083   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1084   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1089 {
1090   PetscInt       np;
1091   PetscErrorCode ierr;
1092 
1093   PetscFunctionBegin;
1094   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1095   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1096   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1097   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1098   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
1102 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1103 {
1104   PetscFunctionBegin;
1105   part->ops->view      = PetscPartitionerView_Gather;
1106   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1107   part->ops->partition = PetscPartitionerPartition_Gather;
1108   PetscFunctionReturn(0);
1109 }
1110 
1111 /*MC
1112   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1113 
1114   Level: intermediate
1115 
1116 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1117 M*/
1118 
1119 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1120 {
1121   PetscPartitioner_Gather *p;
1122   PetscErrorCode           ierr;
1123 
1124   PetscFunctionBegin;
1125   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1126   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1127   part->data = p;
1128 
1129   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1130   PetscFunctionReturn(0);
1131 }
1132 
1133 
1134 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1135 {
1136   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1137   PetscErrorCode          ierr;
1138 
1139   PetscFunctionBegin;
1140   ierr = PetscFree(p);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1145 {
1146   PetscViewerFormat format;
1147   PetscErrorCode    ierr;
1148 
1149   PetscFunctionBegin;
1150   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1151   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1152   PetscFunctionReturn(0);
1153 }
1154 
1155 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1156 {
1157   PetscBool      iascii;
1158   PetscErrorCode ierr;
1159 
1160   PetscFunctionBegin;
1161   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1162   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1163   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1164   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 #if defined(PETSC_HAVE_CHACO)
1169 #if defined(PETSC_HAVE_UNISTD_H)
1170 #include <unistd.h>
1171 #endif
1172 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1173 #include <chaco.h>
1174 #else
1175 /* Older versions of Chaco do not have an include file */
1176 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1177                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1178                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1179                        int mesh_dims[3], double *goal, int global_method, int local_method,
1180                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1181 #endif
1182 extern int FREE_GRAPH;
1183 #endif
1184 
/*
  PetscPartitionerPartition_Chaco - Partition the (serial) graph with the Chaco library.

  Chaco cannot handle a distributed graph, so in debug builds we verify that at most one
  rank holds vertices.  The adjacency list is temporarily shifted to 1-based numbering
  (Chaco's convention) and restored before returning.  Chaco writes diagnostics to
  stdout; on systems with unistd.h we redirect stdout into a pipe so that the text can
  be reported through SETERRQ on failure instead of polluting the output.
*/
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is serial: error out if more than one rank has a nonempty vertex set */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* No local vertices: produce an empty (but valid) section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers: shift the adjacency in place (undone below) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  /* Run the actual Chaco partitioner; its int return code lands in ierr */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Capture whatever Chaco printed, restore stdout, and report it on failure */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertex numbers by partition: partition 0's points first, then 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied to the caller's adjacency array above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1305 
1306 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1307 {
1308   PetscFunctionBegin;
1309   part->ops->view      = PetscPartitionerView_Chaco;
1310   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1311   part->ops->partition = PetscPartitionerPartition_Chaco;
1312   PetscFunctionReturn(0);
1313 }
1314 
1315 /*MC
1316   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1317 
1318   Level: intermediate
1319 
1320 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1321 M*/
1322 
1323 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1324 {
1325   PetscPartitioner_Chaco *p;
1326   PetscErrorCode          ierr;
1327 
1328   PetscFunctionBegin;
1329   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1330   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1331   part->data = p;
1332 
1333   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1334   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1335   PetscFunctionReturn(0);
1336 }
1337 
1338 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1339 {
1340   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1341   PetscErrorCode             ierr;
1342 
1343   PetscFunctionBegin;
1344   ierr = PetscFree(p);CHKERRQ(ierr);
1345   PetscFunctionReturn(0);
1346 }
1347 
1348 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1349 {
1350   PetscViewerFormat format;
1351   PetscErrorCode    ierr;
1352 
1353   PetscFunctionBegin;
1354   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1355   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1356   PetscFunctionReturn(0);
1357 }
1358 
1359 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1360 {
1361   PetscBool      iascii;
1362   PetscErrorCode ierr;
1363 
1364   PetscFunctionBegin;
1365   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1366   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1367   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1368   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1369   PetscFunctionReturn(0);
1370 }
1371 
1372 #if defined(PETSC_HAVE_PARMETIS)
1373 #include <parmetis.h>
1374 #endif
1375 
1376 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1377 {
1378 #if defined(PETSC_HAVE_PARMETIS)
1379   MPI_Comm       comm;
1380   PetscSection   section;
1381   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1382   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1383   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1384   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1385   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1386   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1387   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1388   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1389   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1390   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1391   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1392   PetscInt       options[5];               /* Options */
1393   /* Outputs */
1394   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1395   PetscInt      *assignment, *points;
1396   PetscMPIInt    rank, p, v, i;
1397   PetscErrorCode ierr;
1398 
1399   PetscFunctionBegin;
1400   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1401   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1402   options[0] = 0; /* Use all defaults */
1403   /* Calculate vertex distribution */
1404   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1405   vtxdist[0] = 0;
1406   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1407   for (p = 2; p <= nparts; ++p) {
1408     vtxdist[p] += vtxdist[p-1];
1409   }
1410   /* Calculate weights */
1411   for (p = 0; p < nparts; ++p) {
1412     tpwgts[p] = 1.0/nparts;
1413   }
1414   ubvec[0] = 1.05;
1415   /* Weight cells by dofs on cell by default */
1416   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1417   if (section) {
1418     PetscInt cStart, cEnd, dof;
1419 
1420     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1421     for (v = cStart; v < cEnd; ++v) {
1422       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1423       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1424       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1425       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1426     }
1427   } else {
1428     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1429   }
1430 
1431   if (nparts == 1) {
1432     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1433   } else {
1434     if (vtxdist[1] == vtxdist[nparts]) {
1435       if (!rank) {
1436         PetscStackPush("METIS_PartGraphKway");
1437         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1438         PetscStackPop;
1439         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1440       }
1441     } else {
1442       PetscStackPush("ParMETIS_V3_PartKway");
1443       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1444       PetscStackPop;
1445       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1446     }
1447   }
1448   /* Convert to PetscSection+IS */
1449   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1450   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1451   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1452   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1453   for (p = 0, i = 0; p < nparts; ++p) {
1454     for (v = 0; v < nvtxs; ++v) {
1455       if (assignment[v] == p) points[i++] = v;
1456     }
1457   }
1458   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1459   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1460   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1461   PetscFunctionReturn(0);
1462 #else
1463   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1464 #endif
1465 }
1466 
1467 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1468 {
1469   PetscFunctionBegin;
1470   part->ops->view      = PetscPartitionerView_ParMetis;
1471   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1472   part->ops->partition = PetscPartitionerPartition_ParMetis;
1473   PetscFunctionReturn(0);
1474 }
1475 
1476 /*MC
1477   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1478 
1479   Level: intermediate
1480 
1481 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1482 M*/
1483 
1484 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1485 {
1486   PetscPartitioner_ParMetis *p;
1487   PetscErrorCode          ierr;
1488 
1489   PetscFunctionBegin;
1490   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1491   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1492   part->data = p;
1493 
1494   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1495   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1496   PetscFunctionReturn(0);
1497 }
1498 
1499 /*@
1500   DMPlexGetPartitioner - Get the mesh partitioner
1501 
1502   Not collective
1503 
1504   Input Parameter:
1505 . dm - The DM
1506 
1507   Output Parameter:
1508 . part - The PetscPartitioner
1509 
1510   Level: developer
1511 
1512   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1513 
1514 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1515 @*/
1516 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1517 {
1518   DM_Plex *mesh = (DM_Plex *) dm->data;
1519 
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1522   PetscValidPointer(part, 2);
1523   *part = mesh->partitioner;
1524   PetscFunctionReturn(0);
1525 }
1526 
1527 /*@
1528   DMPlexSetPartitioner - Set the mesh partitioner
1529 
1530   logically collective on dm and part
1531 
1532   Input Parameters:
1533 + dm - The DM
1534 - part - The partitioner
1535 
1536   Level: developer
1537 
1538   Note: Any existing PetscPartitioner will be destroyed.
1539 
1540 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1541 @*/
1542 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1543 {
1544   DM_Plex       *mesh = (DM_Plex *) dm->data;
1545   PetscErrorCode ierr;
1546 
1547   PetscFunctionBegin;
1548   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1549   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1550   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1551   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1552   mesh->partitioner = part;
1553   PetscFunctionReturn(0);
1554 }
1555 
/* DMPlexPartitionLabelClosure_Tree - Label tree-related points (mesh reference-tree
   ancestors and/or descendants of 'point') with the given rank.

   If up is PETSC_TRUE and the point has a tree parent distinct from itself
   (parent == point appears to signal "no parent" -- confirm DMPlexGetTreeParent
   semantics), every point in the transitive closure of that parent is labeled and
   the walk recurses upward only (up = PETSC_TRUE, down = PETSC_FALSE).
   If down is PETSC_TRUE, every tree child of the point is labeled and the walk
   recurses downward only (up = PETSC_FALSE, down = PETSC_TRUE). */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse upward only: the closure already covered the downward direction */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse downward only through the subtree rooted at this child */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1596 
1597 /*@
1598   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1599 
1600   Input Parameters:
1601 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1603 
1604   Level: developer
1605 
1606 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1607 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;   /* work array reused across all closure queries */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Loop over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    /* For each point already labeled with this rank, label its full transitive closure */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* closure holds (point, orientation) pairs */
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also label tree ancestors and descendants of each closure point */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the reused closure work array once, after all iterations; point 0 is a
     dummy argument here (the restore presumably ignores it -- common PETSc idiom) */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1641 
1642 /*@
1643   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1644 
1645   Input Parameters:
1646 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1648 
1649   Level: developer
1650 
1651 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1652 @*/
1653 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1654 {
1655   IS              rankIS,   pointIS;
1656   const PetscInt *ranks,   *points;
1657   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1658   PetscInt       *adj = NULL;
1659   PetscErrorCode  ierr;
1660 
1661   PetscFunctionBegin;
1662   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1663   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1664   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1665   for (r = 0; r < numRanks; ++r) {
1666     const PetscInt rank = ranks[r];
1667 
1668     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1669     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1670     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1671     for (p = 0; p < numPoints; ++p) {
1672       adjSize = PETSC_DETERMINE;
1673       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1674       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1675     }
1676     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1677     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1678   }
1679   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1680   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1681   ierr = PetscFree(adj);CHKERRQ(ierr);
1682   PetscFunctionReturn(0);
1683 }
1684 
1685 /*@
1686   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1687 
1688   Input Parameters:
1689 + dm     - The DM
1690 - label  - DMLabel assinging ranks to remote roots
1691 
1692   Level: developer
1693 
1694   Note: This is required when generating multi-level overlaps to capture
1695   overlap points from non-neighbouring partitions.
1696 
1697 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1698 @*/
1699 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1700 {
1701   MPI_Comm        comm;
1702   PetscMPIInt     rank;
1703   PetscSF         sfPoint;
1704   DMLabel         lblRoots, lblLeaves;
1705   IS              rankIS, pointIS;
1706   const PetscInt *ranks;
1707   PetscInt        numRanks, r;
1708   PetscErrorCode  ierr;
1709 
1710   PetscFunctionBegin;
1711   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1712   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1713   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1714   /* Pull point contributions from remote leaves into local roots */
1715   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1716   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1717   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1718   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1719   for (r = 0; r < numRanks; ++r) {
1720     const PetscInt remoteRank = ranks[r];
1721     if (remoteRank == rank) continue;
1722     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1723     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1724     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1725   }
1726   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1727   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1728   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1729   /* Push point contributions from roots into remote leaves */
1730   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1731   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1732   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1733   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1734   for (r = 0; r < numRanks; ++r) {
1735     const PetscInt remoteRank = ranks[r];
1736     if (remoteRank == rank) continue;
1737     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1738     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1739     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1740   }
1741   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1742   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1743   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1744   PetscFunctionReturn(0);
1745 }
1746 
1747 /*@
1748   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1749 
1750   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1757 
1758   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1759   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1760 
1761   Level: developer
1762 
1763 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1764 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* Build a section over target ranks: dof count for rank r = number of points
     rootLabel maps to r on this process */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints: for each labeled point, record its true owner (point, rank).
     A point that is a leaf in the point SF is owned remotely; record the remote
     (index, rank) pair, otherwise record the point itself with this process's rank. */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local == NULL means the SF leaves are contiguous; assume no remote owner then */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Each PetscSFNode is shipped as a pair of integers (MPIU_2INT) */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Record every received (point, owner-rank) pair in the output label */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1834 
1835 /*@
1836   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1837 
1838   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1844 
1845   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1846 
1847   Level: developer
1848 
1849 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1850 @*/
1851 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1852 {
1853   PetscMPIInt     rank, size;
1854   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1855   PetscSFNode    *remotePoints;
1856   IS              remoteRootIS;
1857   const PetscInt *remoteRoots;
1858   PetscErrorCode ierr;
1859 
1860   PetscFunctionBegin;
1861   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1862   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1863 
1864   for (numRemote = 0, n = 0; n < size; ++n) {
1865     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1866     numRemote += numPoints;
1867   }
1868   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1869   /* Put owned points first */
1870   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1871   if (numPoints > 0) {
1872     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1873     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1874     for (p = 0; p < numPoints; p++) {
1875       remotePoints[idx].index = remoteRoots[p];
1876       remotePoints[idx].rank = rank;
1877       idx++;
1878     }
1879     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1880     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1881   }
1882   /* Now add remote points */
1883   for (n = 0; n < size; ++n) {
1884     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1885     if (numPoints <= 0 || n == rank) continue;
1886     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1887     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1888     for (p = 0; p < numPoints; p++) {
1889       remotePoints[idx].index = remoteRoots[p];
1890       remotePoints[idx].rank = n;
1891       idx++;
1892     }
1893     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1894     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1895   }
1896   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1897   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1898   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1899   PetscFunctionReturn(0);
1900 }
1901