xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 8bee694737bd46ce8c9967ab70009a1b28e47bf2)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; 0 until registered (presumably during DMInitializePackage() — see PetscPartitionerCreate() below) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of partitioner constructors, filled by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guards PetscPartitionerRegisterAll() against repeated registration */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* One-shot flag so the Chaco citation below is registered at most once */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco multilevel partitioning paper.
   Fix: the booktitle line was missing its trailing "\n", fusing it with the
   isbn line and producing a malformed citation entry. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
/* One-shot flag so the ParMETIS citation below is registered at most once */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry for the ParMETIS algorithm paper (presumably passed to PetscCitationsRegister() by the ParMETIS partitioner — registration site not in this chunk) */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh. The caller owns the returned offsets/adjacency arrays and the IS.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 means a point SF exists, i.e. some points are shared with other ranks */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's
     adjacency flags, force (cone, no closure), and restore them after the loop */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand a reference to the caller; we keep using (and later destroy) our own */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap); a
       negative entry in cellNum marks a cell owned by another process */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only neighbors in the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  /* Compress section offsets: only owned cells contribute a CSR row */
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr;             /* NOTE: shadows outer 'size' intentionally limited to this scope */
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers: remote cells are encoded as -(global+1) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Translate the whole adjacency array (vOffsets[*numVertices] entries) in place */
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
367 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368 {
369   PetscErrorCode ierr;
370 
371   PetscFunctionBegin;
372   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
433 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434 {
435   PetscErrorCode ierr;
436 
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
439   PetscValidPointer(name, 2);
440   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
441   *name = ((PetscObject) part)->type_name;
442   PetscFunctionReturn(0);
443 }
444 
445 /*@C
446   PetscPartitionerView - Views a PetscPartitioner
447 
448   Collective on PetscPartitioner
449 
450   Input Parameter:
451 + part - the PetscPartitioner object to view
452 - v    - the viewer
453 
454   Level: developer
455 
456 .seealso: PetscPartitionerDestroy()
457 @*/
458 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459 {
460   PetscErrorCode ierr;
461 
462   PetscFunctionBegin;
463   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
464   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
465   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
466   PetscFunctionReturn(0);
467 }
468 
469 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
470 {
471   const char    *defaultType;
472   char           name[256];
473   PetscBool      flg;
474   PetscErrorCode ierr;
475 
476   PetscFunctionBegin;
477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
478   if (!((PetscObject) part)->type_name)
479 #if defined(PETSC_HAVE_CHACO)
480     defaultType = PETSCPARTITIONERCHACO;
481 #elif defined(PETSC_HAVE_PARMETIS)
482     defaultType = PETSCPARTITIONERPARMETIS;
483 #else
484     defaultType = PETSCPARTITIONERSIMPLE;
485 #endif
486   else
487     defaultType = ((PetscObject) part)->type_name;
488   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
489 
490   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
491   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
492   if (flg) {
493     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
494   } else if (!((PetscObject) part)->type_name) {
495     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
496   }
497   ierr = PetscOptionsEnd();CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* First resolve the type (possibly from -petscpartitioner_type) so that
     the type-specific setfromoptions hook below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscOptionsObject is introduced by the PetscObjectOptionsBegin() macro */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
529 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the type-specific setup hook; types without one need no setup */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
551 
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Ensure the DM package (class ids, registered types, ...) is initialized */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  /* Allocate the object header; destruction/viewing route through the generic entry points */
  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
610 
611 /*@
612   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613 
614   Collective on DM
615 
616   Input Parameters:
617 + part    - The PetscPartitioner
618 - dm      - The mesh DM
619 
620   Output Parameters:
621 + partSection     - The PetscSection giving the division of points by partition
622 - partition       - The list of points by partition
623 
624   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625 
626   Level: developer
627 
628 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
629 @*/
630 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
631 {
632   PetscMPIInt    size;
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
637   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
638   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
639   PetscValidPointer(partition, 5);
640   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
641   if (size == 1) {
642     PetscInt *points;
643     PetscInt  cStart, cEnd, c;
644 
645     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
646     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
647     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
648     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
649     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
650     for (c = cStart; c < cEnd; ++c) points[c] = c;
651     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
652     PetscFunctionReturn(0);
653   }
654   /* Obvious cheating, but I cannot think of a better way to do this. The DMSetFromOptions() has refinement in it,
655      so we cannot call it and have it feed down to the partitioner before partitioning */
656   ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
657   if (part->height == 0) {
658     PetscInt numVertices;
659     PetscInt *start     = NULL;
660     PetscInt *adjacency = NULL;
661     IS       globalNumbering;
662 
663     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
664     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
665     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
666     ierr = PetscFree(start);CHKERRQ(ierr);
667     ierr = PetscFree(adjacency);CHKERRQ(ierr);
668     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
669       const PetscInt *globalNum;
670       const PetscInt *partIdx;
671       PetscInt *map, cStart, cEnd;
672       PetscInt *adjusted, i, localSize, offset;
673       IS    newPartition;
674 
675       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
676       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
677       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
678       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
679       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
680       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
681       for (i = cStart, offset = 0; i < cEnd; i++) {
682         if (globalNum[i - cStart] >= 0) map[offset++] = i;
683       }
684       for (i = 0; i < localSize; i++) {
685         adjusted[i] = map[partIdx[i]];
686       }
687       ierr = PetscFree(map);CHKERRQ(ierr);
688       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
689       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
690       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
691       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
692       ierr = ISDestroy(partition);CHKERRQ(ierr);
693       *partition = newPartition;
694     }
695   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
696   PetscFunctionReturn(0);
697 
698 }
699 
700 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
701 {
702   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
703   PetscErrorCode          ierr;
704 
705   PetscFunctionBegin;
706   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
707   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
708   ierr = PetscFree(p);CHKERRQ(ierr);
709   PetscFunctionReturn(0);
710 }
711 
712 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
713 {
714   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
715   PetscViewerFormat       format;
716   PetscErrorCode          ierr;
717 
718   PetscFunctionBegin;
719   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
720   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
721   if (p->random) {
722     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
723     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
724     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
725   }
726   PetscFunctionReturn(0);
727 }
728 
729 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
730 {
731   PetscBool      iascii;
732   PetscErrorCode ierr;
733 
734   PetscFunctionBegin;
735   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
736   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
737   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
738   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
739   PetscFunctionReturn(0);
740 }
741 
742 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
743 {
744   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
745   PetscErrorCode          ierr;
746 
747   PetscFunctionBegin;
748   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
749   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
750   ierr = PetscOptionsTail();CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
754 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
755 {
756   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
757   PetscInt                np;
758   PetscErrorCode          ierr;
759 
760   PetscFunctionBegin;
761   if (p->random) {
762     PetscRandom r;
763     PetscInt   *sizes, *points, v, p;
764     PetscMPIInt rank;
765 
766     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
767     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
768     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
769     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
770     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
771     for (v = 0; v < numVertices; ++v) {points[v] = v;}
772     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
773     for (v = numVertices-1; v > 0; --v) {
774       PetscReal val;
775       PetscInt  w, tmp;
776 
777       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
778       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
779       w    = PetscFloorReal(val);
780       tmp       = points[v];
781       points[v] = points[w];
782       points[w] = tmp;
783     }
784     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
785     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
786     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
787   }
788   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
789   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
790   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
791   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
792   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
793   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
794   *partition = p->partition;
795   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
796   PetscFunctionReturn(0);
797 }
798 
799 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
800 {
801   PetscFunctionBegin;
802   part->ops->view           = PetscPartitionerView_Shell;
803   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
804   part->ops->destroy        = PetscPartitionerDestroy_Shell;
805   part->ops->partition      = PetscPartitionerPartition_Shell;
806   PetscFunctionReturn(0);
807 }
808 
809 /*MC
810   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
811 
812   Level: intermediate
813 
814 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
815 M*/
816 
817 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
818 {
819   PetscPartitioner_Shell *p;
820   PetscErrorCode          ierr;
821 
822   PetscFunctionBegin;
823   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
824   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
825   part->data = p;
826 
827   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
828   p->random = PETSC_FALSE;
829   PetscFunctionReturn(0);
830 }
831 
832 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
834 
835   Collective on Part
836 
837   Input Parameters:
838 + part     - The PetscPartitioner
839 . size - The number of partitions
840 . sizes    - array of size size (or NULL) providing the number of points in each partition
841 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
842 
843   Level: developer
844 
845   Notes:
846 
847     It is safe to free the sizes and points arrays after use in this routine.
848 
849 .seealso DMPlexDistribute(), PetscPartitionerCreate()
850 @*/
851 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
852 {
853   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
854   PetscInt                proc, numPoints;
855   PetscErrorCode          ierr;
856 
857   PetscFunctionBegin;
858   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
859   if (sizes)  {PetscValidPointer(sizes, 3);}
860   if (points) {PetscValidPointer(points, 4);}
861   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
862   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
863   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
864   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
865   if (sizes) {
866     for (proc = 0; proc < size; ++proc) {
867       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
868     }
869   }
870   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
871   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
872   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
873   PetscFunctionReturn(0);
874 }
875 
876 /*@
877   PetscPartitionerShellSetRandom - Set the flag to use a random partition
878 
879   Collective on Part
880 
881   Input Parameters:
882 + part   - The PetscPartitioner
883 - random - The flag to use a random partition
884 
885   Level: intermediate
886 
887 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
888 @*/
889 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
890 {
891   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
895   p->random = random;
896   PetscFunctionReturn(0);
897 }
898 
899 /*@
900   PetscPartitionerShellGetRandom - get the flag to use a random partition
901 
902   Collective on Part
903 
904   Input Parameter:
905 . part   - The PetscPartitioner
906 
  Output Parameter:
908 . random - The flag to use a random partition
909 
910   Level: intermediate
911 
912 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
913 @*/
914 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
915 {
916   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
917 
918   PetscFunctionBegin;
919   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
920   PetscValidPointer(random, 2);
921   *random = p->random;
922   PetscFunctionReturn(0);
923 }
924 
925 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
926 {
927   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
928   PetscErrorCode          ierr;
929 
930   PetscFunctionBegin;
931   ierr = PetscFree(p);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
936 {
937   PetscViewerFormat format;
938   PetscErrorCode    ierr;
939 
940   PetscFunctionBegin;
941   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
942   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
946 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
947 {
948   PetscBool      iascii;
949   PetscErrorCode ierr;
950 
951   PetscFunctionBegin;
952   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
953   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
954   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
955   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
956   PetscFunctionReturn(0);
957 }
958 
/* Partition the graph by slicing the global vertex numbering into nparts contiguous,
   nearly equal chunks; each rank reports how many of its local vertices fall in each chunk. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The returned IS is the identity: vertices keep their local order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial case: the first numVertices % nparts partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the global index range [myFirst, myLast] of the local vertices by
       prefix-summing the per-rank vertex counts gathered from all ranks */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;    /* number of "large" partitions */
      PetscInt pSmall = nvGlobal/nparts;   /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large partition */

      /* Globally, partition np owns the contiguous range
         [np*pSmall + min(rem,np), (np+1)*pSmall + min(rem,np+1)); the first rem
         partitions are the large ones.  Locate the partitions containing
         myFirst and myLast. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst lies past the rem large partitions; index into the small ones */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions have equal size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each partition intersecting the local range, record the overlap size */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1032 
1033 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1034 {
1035   PetscFunctionBegin;
1036   part->ops->view      = PetscPartitionerView_Simple;
1037   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1038   part->ops->partition = PetscPartitionerPartition_Simple;
1039   PetscFunctionReturn(0);
1040 }
1041 
1042 /*MC
1043   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1044 
1045   Level: intermediate
1046 
1047 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1048 M*/
1049 
1050 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1051 {
1052   PetscPartitioner_Simple *p;
1053   PetscErrorCode           ierr;
1054 
1055   PetscFunctionBegin;
1056   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1057   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1058   part->data = p;
1059 
1060   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1065 {
1066   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1067   PetscErrorCode          ierr;
1068 
1069   PetscFunctionBegin;
1070   ierr = PetscFree(p);CHKERRQ(ierr);
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1075 {
1076   PetscViewerFormat format;
1077   PetscErrorCode    ierr;
1078 
1079   PetscFunctionBegin;
1080   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1081   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1082   PetscFunctionReturn(0);
1083 }
1084 
1085 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1086 {
1087   PetscBool      iascii;
1088   PetscErrorCode ierr;
1089 
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1092   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1093   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1094   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1099 {
1100   PetscInt       np;
1101   PetscErrorCode ierr;
1102 
1103   PetscFunctionBegin;
1104   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1105   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1106   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1107   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1108   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1113 {
1114   PetscFunctionBegin;
1115   part->ops->view      = PetscPartitionerView_Gather;
1116   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1117   part->ops->partition = PetscPartitionerPartition_Gather;
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 /*MC
1122   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1123 
1124   Level: intermediate
1125 
1126 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1127 M*/
1128 
1129 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1130 {
1131   PetscPartitioner_Gather *p;
1132   PetscErrorCode           ierr;
1133 
1134   PetscFunctionBegin;
1135   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1136   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1137   part->data = p;
1138 
1139   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1140   PetscFunctionReturn(0);
1141 }
1142 
1143 
1144 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1145 {
1146   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1147   PetscErrorCode          ierr;
1148 
1149   PetscFunctionBegin;
1150   ierr = PetscFree(p);CHKERRQ(ierr);
1151   PetscFunctionReturn(0);
1152 }
1153 
1154 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1155 {
1156   PetscViewerFormat format;
1157   PetscErrorCode    ierr;
1158 
1159   PetscFunctionBegin;
1160   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1161   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1162   PetscFunctionReturn(0);
1163 }
1164 
1165 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1166 {
1167   PetscBool      iascii;
1168   PetscErrorCode ierr;
1169 
1170   PetscFunctionBegin;
1171   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1172   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1173   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1174   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1175   PetscFunctionReturn(0);
1176 }
1177 
1178 #if defined(PETSC_HAVE_CHACO)
1179 #if defined(PETSC_HAVE_UNISTD_H)
1180 #include <unistd.h>
1181 #endif
1182 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1183 #include <chaco.h>
1184 #else
1185 /* Older versions of Chaco do not have an include file */
1186 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1187                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1188                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1189                        int mesh_dims[3], double *goal, int global_method, int local_method,
1190                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1191 #endif
1192 extern int FREE_GRAPH;
1193 #endif
1194 
/* Partition the local graph with the Chaco library (serial multilevel Kernighan-Lin).
   Chaco prints to stdout, so stdout is temporarily redirected into a pipe and the
   captured text is used as the error message if the library fails. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* A rank with no local vertices contributes an empty section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers: shift the CSR adjacency in place
     (restored to 0-based before returning) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and read back what Chaco printed; it becomes the error
       message on failure.  NOTE(review): output beyond 10000-1 bytes is truncated. */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* points[] lists the vertices grouped by target partition (0 first, then 1, ...) */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1302 
1303 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1304 {
1305   PetscFunctionBegin;
1306   part->ops->view      = PetscPartitionerView_Chaco;
1307   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1308   part->ops->partition = PetscPartitionerPartition_Chaco;
1309   PetscFunctionReturn(0);
1310 }
1311 
1312 /*MC
1313   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1314 
1315   Level: intermediate
1316 
1317 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1318 M*/
1319 
1320 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1321 {
1322   PetscPartitioner_Chaco *p;
1323   PetscErrorCode          ierr;
1324 
1325   PetscFunctionBegin;
1326   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1327   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1328   part->data = p;
1329 
1330   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1331   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1332   PetscFunctionReturn(0);
1333 }
1334 
1335 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1336 {
1337   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1338   PetscErrorCode             ierr;
1339 
1340   PetscFunctionBegin;
1341   ierr = PetscFree(p);CHKERRQ(ierr);
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1346 {
1347   PetscViewerFormat format;
1348   PetscErrorCode    ierr;
1349 
1350   PetscFunctionBegin;
1351   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1352   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1353   PetscFunctionReturn(0);
1354 }
1355 
1356 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1357 {
1358   PetscBool      iascii;
1359   PetscErrorCode ierr;
1360 
1361   PetscFunctionBegin;
1362   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1363   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1364   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1365   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1366   PetscFunctionReturn(0);
1367 }
1368 
1369 #if defined(PETSC_HAVE_PARMETIS)
1370 #include <parmetis.h>
1371 #endif
1372 
1373 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1374 {
1375 #if defined(PETSC_HAVE_PARMETIS)
1376   MPI_Comm       comm;
1377   PetscSection   section;
1378   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1379   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1380   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1381   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1382   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1383   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1384   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1385   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1386   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1387   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1388   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1389   PetscInt       options[5];               /* Options */
1390   /* Outputs */
1391   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1392   PetscInt      *assignment, *points;
1393   PetscMPIInt    rank, p, v, i;
1394   PetscErrorCode ierr;
1395 
1396   PetscFunctionBegin;
1397   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1398   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1399   options[0] = 0; /* Use all defaults */
1400   /* Calculate vertex distribution */
1401   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1402   vtxdist[0] = 0;
1403   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1404   for (p = 2; p <= nparts; ++p) {
1405     vtxdist[p] += vtxdist[p-1];
1406   }
1407   /* Calculate weights */
1408   for (p = 0; p < nparts; ++p) {
1409     tpwgts[p] = 1.0/nparts;
1410   }
1411   ubvec[0] = 1.05;
1412   /* Weight cells by dofs on cell by default */
1413   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1414   if (section) {
1415     PetscInt cStart, cEnd, dof;
1416 
1417     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1418     for (v = cStart; v < cEnd; ++v) {
1419       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1420       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1421       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1422       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1423     }
1424   } else {
1425     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1426   }
1427 
1428   if (nparts == 1) {
1429     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1430   } else {
1431     if (vtxdist[1] == vtxdist[nparts]) {
1432       if (!rank) {
1433         PetscStackPush("METIS_PartGraphKway");
1434         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1435         PetscStackPop;
1436         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1437       }
1438     } else {
1439       PetscStackPush("ParMETIS_V3_PartKway");
1440       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1441       PetscStackPop;
1442       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1443     }
1444   }
1445   /* Convert to PetscSection+IS */
1446   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1447   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1448   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1449   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1450   for (p = 0, i = 0; p < nparts; ++p) {
1451     for (v = 0; v < nvtxs; ++v) {
1452       if (assignment[v] == p) points[i++] = v;
1453     }
1454   }
1455   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1456   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1457   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1458   PetscFunctionReturn(0);
1459 #else
1460   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1461 #endif
1462 }
1463 
1464 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1465 {
1466   PetscFunctionBegin;
1467   part->ops->view      = PetscPartitionerView_ParMetis;
1468   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1469   part->ops->partition = PetscPartitionerPartition_ParMetis;
1470   PetscFunctionReturn(0);
1471 }
1472 
1473 /*MC
1474   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1475 
1476   Level: intermediate
1477 
1478 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1479 M*/
1480 
1481 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1482 {
1483   PetscPartitioner_ParMetis *p;
1484   PetscErrorCode          ierr;
1485 
1486   PetscFunctionBegin;
1487   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1488   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1489   part->data = p;
1490 
1491   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1492   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1493   PetscFunctionReturn(0);
1494 }
1495 
1496 /*@
1497   DMPlexGetPartitioner - Get the mesh partitioner
1498 
1499   Not collective
1500 
1501   Input Parameter:
1502 . dm - The DM
1503 
1504   Output Parameter:
1505 . part - The PetscPartitioner
1506 
1507   Level: developer
1508 
1509   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1510 
1511 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1512 @*/
1513 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1514 {
1515   DM_Plex       *mesh = (DM_Plex *) dm->data;
1516   PetscErrorCode ierr;
1517 
1518   PetscFunctionBegin;
1519   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1520   PetscValidPointer(part, 2);
1521   ierr = PetscPartitionerSetTypeFromOptions_Internal(mesh->partitioner);CHKERRQ(ierr);
1522   *part = mesh->partitioner;
1523   PetscFunctionReturn(0);
1524 }
1525 
1526 /*@
1527   DMPlexSetPartitioner - Set the mesh partitioner
1528 
1529   logically collective on dm and part
1530 
1531   Input Parameters:
1532 + dm - The DM
1533 - part - The partitioner
1534 
1535   Level: developer
1536 
1537   Note: Any existing PetscPartitioner will be destroyed.
1538 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1540 @*/
1541 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1542 {
1543   DM_Plex       *mesh = (DM_Plex *) dm->data;
1544   PetscErrorCode ierr;
1545 
1546   PetscFunctionBegin;
1547   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1548   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1549   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1550   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1551   mesh->partitioner = part;
1552   PetscFunctionReturn(0);
1553 }
1554 
1555 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1556 {
1557   PetscErrorCode ierr;
1558 
1559   PetscFunctionBegin;
1560   if (up) {
1561     PetscInt parent;
1562 
1563     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1564     if (parent != point) {
1565       PetscInt closureSize, *closure = NULL, i;
1566 
1567       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1568       for (i = 0; i < closureSize; i++) {
1569         PetscInt cpoint = closure[2*i];
1570 
1571         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1572         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1573       }
1574       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1575     }
1576   }
1577   if (down) {
1578     PetscInt numChildren;
1579     const PetscInt *children;
1580 
1581     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1582     if (numChildren) {
1583       PetscInt i;
1584 
1585       for (i = 0; i < numChildren; i++) {
1586         PetscInt cpoint = children[i];
1587 
1588         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1589         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1590       }
1591     }
1592   }
1593   PetscFunctionReturn(0);
1594 }
1595 
1596 /*@
1597   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1598 
1599   Input Parameters:
1600 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1602 
1603   Level: developer
1604 
1605 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1606 @*/
1607 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1608 {
1609   IS              rankIS,   pointIS;
1610   const PetscInt *ranks,   *points;
1611   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1612   PetscInt       *closure = NULL;
1613   PetscErrorCode  ierr;
1614 
1615   PetscFunctionBegin;
1616   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1617   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1618   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1619   for (r = 0; r < numRanks; ++r) {
1620     const PetscInt rank = ranks[r];
1621 
1622     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1623     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1624     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1625     for (p = 0; p < numPoints; ++p) {
1626       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1627       for (c = 0; c < closureSize*2; c += 2) {
1628         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1629         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1630       }
1631     }
1632     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1633     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1634   }
1635   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1636   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1637   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1638   PetscFunctionReturn(0);
1639 }
1640 
1641 /*@
1642   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1643 
1644   Input Parameters:
1645 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1647 
1648   Level: developer
1649 
1650 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1651 @*/
1652 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1653 {
1654   IS              rankIS,   pointIS;
1655   const PetscInt *ranks,   *points;
1656   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1657   PetscInt       *adj = NULL;
1658   PetscErrorCode  ierr;
1659 
1660   PetscFunctionBegin;
1661   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1662   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1663   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1664   for (r = 0; r < numRanks; ++r) {
1665     const PetscInt rank = ranks[r];
1666 
1667     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1668     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1669     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1670     for (p = 0; p < numPoints; ++p) {
1671       adjSize = PETSC_DETERMINE;
1672       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1673       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1674     }
1675     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1676     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1677   }
1678   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1679   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1680   ierr = PetscFree(adj);CHKERRQ(ierr);
1681   PetscFunctionReturn(0);
1682 }
1683 
1684 /*@
1685   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1686 
1687   Input Parameters:
1688 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1690 
1691   Level: developer
1692 
1693   Note: This is required when generating multi-level overlaps to capture
1694   overlap points from non-neighbouring partitions.
1695 
1696 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1697 @*/
1698 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1699 {
1700   MPI_Comm        comm;
1701   PetscMPIInt     rank;
1702   PetscSF         sfPoint;
1703   DMLabel         lblRoots, lblLeaves;
1704   IS              rankIS, pointIS;
1705   const PetscInt *ranks;
1706   PetscInt        numRanks, r;
1707   PetscErrorCode  ierr;
1708 
1709   PetscFunctionBegin;
1710   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1711   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1712   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1713   /* Pull point contributions from remote leaves into local roots */
1714   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1715   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1716   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1717   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1718   for (r = 0; r < numRanks; ++r) {
1719     const PetscInt remoteRank = ranks[r];
1720     if (remoteRank == rank) continue;
1721     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1722     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1723     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1724   }
1725   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1726   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1727   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1728   /* Push point contributions from roots into remote leaves */
1729   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1730   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1731   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1732   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1733   for (r = 0; r < numRanks; ++r) {
1734     const PetscInt remoteRank = ranks[r];
1735     if (remoteRank == rank) continue;
1736     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1737     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1738     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1739   }
1740   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1741   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1742   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1743   PetscFunctionReturn(0);
1744 }
1745 
1746 /*@
1747   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1748 
1749   Input Parameters:
1750 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1756 
1757   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1758   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1759 
1760   Level: developer
1761 
1762 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1763 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection has chart [0, size): dof at target rank t = number of points this process sends to t */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Pack each stratum into rootPoints at its section offset; a point that is a
     leaf of the point SF is replaced by its owner's (rank, index) pair so that
     receivers learn the true owner */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Binary search for the point among the SF leaves (local may be NULL for a contiguous leaf range) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}                                     /* shared point: send owner's (rank, index) */
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;} /* locally owned point */
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Insert every received (point, rank) pair into the leaf label */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1833 
1834 /*@
1835   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1836 
1837   Input Parameters:
1838 + dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1843 
1844   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1845 
1846   Level: developer
1847 
1848 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1849 @*/
1850 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1851 {
1852   PetscMPIInt     rank, size;
1853   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1854   PetscSFNode    *remotePoints;
1855   IS              remoteRootIS;
1856   const PetscInt *remoteRoots;
1857   PetscErrorCode ierr;
1858 
1859   PetscFunctionBegin;
1860   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1861   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1862 
1863   for (numRemote = 0, n = 0; n < size; ++n) {
1864     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1865     numRemote += numPoints;
1866   }
1867   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1868   /* Put owned points first */
1869   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1870   if (numPoints > 0) {
1871     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1872     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1873     for (p = 0; p < numPoints; p++) {
1874       remotePoints[idx].index = remoteRoots[p];
1875       remotePoints[idx].rank = rank;
1876       idx++;
1877     }
1878     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1879     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1880   }
1881   /* Now add remote points */
1882   for (n = 0; n < size; ++n) {
1883     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1884     if (numPoints <= 0 || n == rank) continue;
1885     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1886     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1887     for (p = 0; p < numPoints; p++) {
1888       remotePoints[idx].index = remoteRoots[p];
1889       remotePoints[idx].rank = n;
1890       idx++;
1891     }
1892     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1893     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1894   }
1895   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1896   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1897   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1898   PetscFunctionReturn(0);
1899 }
1900