xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 12521bc7fd79d2e629b1f82326b30f09cd2e12af)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
29 /*@C
30   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
31 
32   Input Parameters:
33 + dm      - The mesh DM dm
34 - height  - Height of the strata from which to construct the graph
35 
36   Output Parameter:
37 + numVertices     - Number of vertices in the graph
38 . offsets         - Point offsets in the graph
39 . adjacency       - Point connectivity in the graph
40 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
41 
42   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
43   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44   representation on the mesh.
45 
46   Level: developer
47 
48 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49 @*/
50 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51 {
52   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
53   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
54   IS             cellNumbering;
55   const PetscInt *cellNum;
56   PetscBool      useCone, useClosure;
57   PetscSection   section;
58   PetscSegBuffer adjBuffer;
59   PetscSF        sfPoint;
60   PetscMPIInt    rank;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
65   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
66   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
67   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
68   /* Build adjacency graph via a section/segbuffer */
69   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
70   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
71   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
72   /* Always use FVM adjacency to create partitioner graph */
73   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
74   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
75   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
77   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
78   if (globalNumbering) {
79     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
80     *globalNumbering = cellNumbering;
81   }
82   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
83   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85     if (nroots > 0) {if (cellNum[p] < 0) continue;}
86     adjSize = PETSC_DETERMINE;
87     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
88     for (a = 0; a < adjSize; ++a) {
89       const PetscInt point = adj[a];
90       if (point != p && pStart <= point && point < pEnd) {
91         PetscInt *PETSC_RESTRICT pBuf;
92         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
93         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
94         *pBuf = point;
95       }
96     }
97     (*numVertices)++;
98   }
99   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
100   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
101   /* Derive CSR graph from section/segbuffer */
102   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
103   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
104   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
105   for (idx = 0, p = pStart; p < pEnd; p++) {
106     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
108   }
109   vOffsets[*numVertices] = size;
110   if (offsets) *offsets = vOffsets;
111   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
112   {
113     ISLocalToGlobalMapping ltogCells;
114     PetscInt n, size, *cells_arr;
115     /* In parallel, apply a global cell numbering to the graph */
116     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
120     /* Convert to positive global cell numbers */
121     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
125     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
126   }
127   if (adjacency) *adjacency = graph;
128   /* Clean up */
129   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
130   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
131   ierr = PetscFree(adj);CHKERRQ(ierr);
132   PetscFunctionReturn(0);
133 }
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under sname in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
/*@C
  PetscPartitionerSetType - Builds a particular PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - The PetscPartitioner object
- name - The kind of partitioner

  Options Database Key:
. -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

  Level: intermediate

.keywords: PetscPartitioner, set, type
.seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
{
  PetscErrorCode (*r)(PetscPartitioner);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* No-op if the object already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Ensure built-in implementations are registered before searching the list */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

  /* Tear down any existing implementation before the new type's create routine repopulates part */
  if (part->ops->destroy) {
    ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
    part->ops->destroy = NULL;
  }
  ierr = (*r)(part);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
416 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (borrowed reference to the object's internal string; do not free)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return a pointer to the internal type string, not a copy */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer (if NULL, the default ASCII viewer for the object's communicator is used)

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Fall back to stdout when no viewer is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the implementation's view routine, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
470 {
471   const char    *defaultType;
472   char           name[256];
473   PetscBool      flg;
474   PetscErrorCode ierr;
475 
476   PetscFunctionBegin;
477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
478   if (!((PetscObject) part)->type_name)
479 #if defined(PETSC_HAVE_CHACO)
480     defaultType = PETSCPARTITIONERCHACO;
481 #elif defined(PETSC_HAVE_PARMETIS)
482     defaultType = PETSCPARTITIONERPARMETIS;
483 #else
484     defaultType = PETSCPARTITIONERSIMPLE;
485 #endif
486   else
487     defaultType = ((PetscObject) part)->type_name;
488   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
489 
490   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
491   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
492   if (flg) {
493     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
494   } else if (!((PetscObject) part)->type_name) {
495     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
496   }
497   ierr = PetscOptionsEnd();CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions hook below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* NOTE: PetscOptionsObject is introduced by the PetscObjectOptionsBegin() macro expansion */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
529 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation's setup routine, if one is provided */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
551 
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* Presumably registers PETSCPARTITIONER_CLASSID and related infrastructure — required before PetscHeaderCreate */
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
610 
611 /*@
612   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613 
614   Collective on DM
615 
616   Input Parameters:
617 + part    - The PetscPartitioner
618 - dm      - The mesh DM
619 
620   Output Parameters:
621 + partSection     - The PetscSection giving the division of points by partition
622 - partition       - The list of points by partition
623 
624   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625 
626   Level: developer
627 
628 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
629 @*/
630 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
631 {
632   PetscMPIInt    size;
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
637   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
638   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
639   PetscValidPointer(partition, 5);
640   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
641   if (size == 1) {
642     PetscInt *points;
643     PetscInt  cStart, cEnd, c;
644 
645     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
646     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
647     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
648     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
649     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
650     for (c = cStart; c < cEnd; ++c) points[c] = c;
651     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
652     PetscFunctionReturn(0);
653   }
654   /* Obvious cheating, but I cannot think of a better way to do this. The DMSetFromOptions() has refinement in it,
655      so we cannot call it and have it feed down to the partitioner before partitioning */
656   ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
657   if (part->height == 0) {
658     PetscInt numVertices;
659     PetscInt *start     = NULL;
660     PetscInt *adjacency = NULL;
661     IS       globalNumbering;
662 
663     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
664     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
665     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
666     ierr = PetscFree(start);CHKERRQ(ierr);
667     ierr = PetscFree(adjacency);CHKERRQ(ierr);
668     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
669       const PetscInt *globalNum;
670       const PetscInt *partIdx;
671       PetscInt *map, cStart, cEnd;
672       PetscInt *adjusted, i, localSize, offset;
673       IS    newPartition;
674 
675       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
676       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
677       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
678       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
679       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
680       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
681       for (i = cStart, offset = 0; i < cEnd; i++) {
682         if (globalNum[i - cStart] >= 0) map[offset++] = i;
683       }
684       for (i = 0; i < localSize; i++) {
685         adjusted[i] = map[partIdx[i]];
686       }
687       ierr = PetscFree(map);CHKERRQ(ierr);
688       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
689       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
690       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
691       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
692       ierr = ISDestroy(partition);CHKERRQ(ierr);
693       *partition = newPartition;
694     }
695   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
696   PetscFunctionReturn(0);
697 
698 }
699 
/* Free the Shell partitioner's private data: the stored section, partition IS, and the data struct itself */
static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
711 
712 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
713 {
714   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
715   PetscViewerFormat       format;
716   PetscErrorCode          ierr;
717 
718   PetscFunctionBegin;
719   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
720   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
721   if (p->random) {
722     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
723     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
724     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
725   }
726   PetscFunctionReturn(0);
727 }
728 
/* Dispatch viewing of a Shell partitioner; only ASCII viewers are supported (others are silently ignored) */
static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
741 
/* Options for the Shell partitioner: -petscpartitioner_shell_random toggles a random partition */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
753 
754 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
755 {
756   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
757   PetscInt                np;
758   PetscErrorCode          ierr;
759 
760   PetscFunctionBegin;
761   if (p->random) {
762     PetscRandom r;
763     PetscInt   *sizes, *points, v, p;
764     PetscMPIInt rank;
765 
766     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
767     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
768     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
769     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
770     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
771     for (v = 0; v < numVertices; ++v) {points[v] = v;}
772     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
773     for (v = numVertices-1; v > 0; --v) {
774       PetscReal val;
775       PetscInt  w, tmp;
776 
777       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
778       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
779       w    = PetscFloorReal(val);
780       tmp       = points[v];
781       points[v] = points[w];
782       points[w] = tmp;
783     }
784     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
785     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
786     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
787   }
788   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
789   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
790   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
791   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
792   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
793   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
794   *partition = p->partition;
795   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
796   PetscFunctionReturn(0);
797 }
798 
799 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
800 {
801   PetscFunctionBegin;
802   part->ops->view           = PetscPartitionerView_Shell;
803   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
804   part->ops->destroy        = PetscPartitionerDestroy_Shell;
805   part->ops->partition      = PetscPartitionerPartition_Shell;
806   PetscFunctionReturn(0);
807 }
808 
809 /*MC
810   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
811 
812   Level: intermediate
813 
814 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
815 M*/
816 
817 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
818 {
819   PetscPartitioner_Shell *p;
820   PetscErrorCode          ierr;
821 
822   PetscFunctionBegin;
823   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
824   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
825   part->data = p;
826 
827   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
828   p->random = PETSC_FALSE;
829   PetscFunctionReturn(0);
830 }
831 
832 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
834 
835   Collective on Part
836 
837   Input Parameters:
838 + part     - The PetscPartitioner
839 . size - The number of partitions
840 . sizes    - array of size size (or NULL) providing the number of points in each partition
841 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
842 
843   Level: developer
844 
845   Notes:
846 
847     It is safe to free the sizes and points arrays after use in this routine.
848 
849 .seealso DMPlexDistribute(), PetscPartitionerCreate()
850 @*/
851 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
852 {
853   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
854   PetscInt                proc, numPoints;
855   PetscErrorCode          ierr;
856 
857   PetscFunctionBegin;
858   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
859   if (sizes)  {PetscValidPointer(sizes, 3);}
860   if (points) {PetscValidPointer(points, 4);}
861   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
862   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
863   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
864   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
865   if (sizes) {
866     for (proc = 0; proc < size; ++proc) {
867       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
868     }
869   }
870   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
871   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
872   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
873   PetscFunctionReturn(0);
874 }
875 
876 /*@
877   PetscPartitionerShellSetRandom - Set the flag to use a random partition
878 
879   Collective on Part
880 
881   Input Parameters:
882 + part   - The PetscPartitioner
883 - random - The flag to use a random partition
884 
885   Level: intermediate
886 
887 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
888 @*/
889 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
890 {
891   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
895   p->random = random;
896   PetscFunctionReturn(0);
897 }
898 
899 /*@
900   PetscPartitionerShellGetRandom - get the flag to use a random partition
901 
902   Collective on Part
903 
904   Input Parameter:
905 . part   - The PetscPartitioner
906 
  Output Parameter:
908 . random - The flag to use a random partition
909 
910   Level: intermediate
911 
912 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
913 @*/
914 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
915 {
916   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
917 
918   PetscFunctionBegin;
919   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
920   PetscValidPointer(random, 2);
921   *random = p->random;
922   PetscFunctionReturn(0);
923 }
924 
925 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
926 {
927   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
928   PetscErrorCode          ierr;
929 
930   PetscFunctionBegin;
931   ierr = PetscFree(p);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
935 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
936 {
937   PetscViewerFormat format;
938   PetscErrorCode    ierr;
939 
940   PetscFunctionBegin;
941   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
942   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
946 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
947 {
948   PetscBool      iascii;
949   PetscErrorCode ierr;
950 
951   PetscFunctionBegin;
952   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
953   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
954   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
955   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
956   PetscFunctionReturn(0);
957 }
958 
/* Partition the graph into nparts contiguous, near-equal blocks of the global
   vertex numbering.  Local point order is preserved (the output IS is the
   identity); only the section's dof counts per partition are computed.  In
   parallel, each rank determines which global blocks overlap its contiguous
   range of vertices [myFirst, myLast] and records the overlap sizes. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Identity IS: vertices are assigned to partitions in their existing order */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: partition np gets floor(n/nparts) vertices, plus one extra for the first (n % nparts) partitions */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute the exclusive prefix sum of local sizes to get each rank's global vertex range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      /* The first 'rem' partitions are "big" (pBig vertices), the rest "small" (pSmall) */
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the partitions containing myFirst and myLast, treating the
           big partitions (all of size pBig) first, then the small ones */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst lies past the big partitions; index into the small ones */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions have identical size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record the size of the overlap of each covered partition with [myFirst, myLast] */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1032 
1033 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1034 {
1035   PetscFunctionBegin;
1036   part->ops->view      = PetscPartitionerView_Simple;
1037   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1038   part->ops->partition = PetscPartitionerPartition_Simple;
1039   PetscFunctionReturn(0);
1040 }
1041 
1042 /*MC
1043   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1044 
1045   Level: intermediate
1046 
1047 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1048 M*/
1049 
1050 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1051 {
1052   PetscPartitioner_Simple *p;
1053   PetscErrorCode           ierr;
1054 
1055   PetscFunctionBegin;
1056   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1057   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1058   part->data = p;
1059 
1060   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1065 {
1066   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1067   PetscErrorCode          ierr;
1068 
1069   PetscFunctionBegin;
1070   ierr = PetscFree(p);CHKERRQ(ierr);
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1075 {
1076   PetscViewerFormat format;
1077   PetscErrorCode    ierr;
1078 
1079   PetscFunctionBegin;
1080   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1081   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1082   PetscFunctionReturn(0);
1083 }
1084 
1085 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1086 {
1087   PetscBool      iascii;
1088   PetscErrorCode ierr;
1089 
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1092   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1093   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1094   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1099 {
1100   PetscInt       np;
1101   PetscErrorCode ierr;
1102 
1103   PetscFunctionBegin;
1104   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1105   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1106   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1107   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1108   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1113 {
1114   PetscFunctionBegin;
1115   part->ops->view      = PetscPartitionerView_Gather;
1116   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1117   part->ops->partition = PetscPartitionerPartition_Gather;
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 /*MC
1122   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1123 
1124   Level: intermediate
1125 
1126 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1127 M*/
1128 
1129 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1130 {
1131   PetscPartitioner_Gather *p;
1132   PetscErrorCode           ierr;
1133 
1134   PetscFunctionBegin;
1135   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1136   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1137   part->data = p;
1138 
1139   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1140   PetscFunctionReturn(0);
1141 }
1142 
1143 
1144 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1145 {
1146   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1147   PetscErrorCode          ierr;
1148 
1149   PetscFunctionBegin;
1150   ierr = PetscFree(p);CHKERRQ(ierr);
1151   PetscFunctionReturn(0);
1152 }
1153 
1154 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1155 {
1156   PetscViewerFormat format;
1157   PetscErrorCode    ierr;
1158 
1159   PetscFunctionBegin;
1160   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1161   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1162   PetscFunctionReturn(0);
1163 }
1164 
1165 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1166 {
1167   PetscBool      iascii;
1168   PetscErrorCode ierr;
1169 
1170   PetscFunctionBegin;
1171   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1172   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1173   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1174   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1175   PetscFunctionReturn(0);
1176 }
1177 
1178 #if defined(PETSC_HAVE_CHACO)
1179 #if defined(PETSC_HAVE_UNISTD_H)
1180 #include <unistd.h>
1181 #endif
1182 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1183 #include <chaco.h>
1184 #else
1185 /* Older versions of Chaco do not have an include file */
1186 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1187                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1188                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1189                        int mesh_dims[3], double *goal, int global_method, int local_method,
1190                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1191 #endif
1192 extern int FREE_GRAPH;
1193 #endif
1194 
/* Partition the (serial) graph with the Chaco library.  Chaco is not parallel:
   in debug builds we verify at most one rank holds vertices.  Chaco prints to
   stdout, so stdout is temporarily redirected into a pipe and replayed into
   the error message if the call fails. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is serial: error out if more than one rank holds graph vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing local to partition: produce an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering; shift in place and undo at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout; on failure include Chaco's captured output in the error */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List the vertices grouped by the partition they were assigned to */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1315 
1316 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1317 {
1318   PetscFunctionBegin;
1319   part->ops->view      = PetscPartitionerView_Chaco;
1320   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1321   part->ops->partition = PetscPartitionerPartition_Chaco;
1322   PetscFunctionReturn(0);
1323 }
1324 
1325 /*MC
1326   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1327 
1328   Level: intermediate
1329 
1330 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1331 M*/
1332 
1333 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1334 {
1335   PetscPartitioner_Chaco *p;
1336   PetscErrorCode          ierr;
1337 
1338   PetscFunctionBegin;
1339   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1340   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1341   part->data = p;
1342 
1343   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1344   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1345   PetscFunctionReturn(0);
1346 }
1347 
1348 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1349 {
1350   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1351   PetscErrorCode             ierr;
1352 
1353   PetscFunctionBegin;
1354   ierr = PetscFree(p);CHKERRQ(ierr);
1355   PetscFunctionReturn(0);
1356 }
1357 
1358 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1359 {
1360   PetscViewerFormat format;
1361   PetscErrorCode    ierr;
1362 
1363   PetscFunctionBegin;
1364   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1365   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1366   PetscFunctionReturn(0);
1367 }
1368 
1369 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1370 {
1371   PetscBool      iascii;
1372   PetscErrorCode ierr;
1373 
1374   PetscFunctionBegin;
1375   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1376   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1377   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1378   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1379   PetscFunctionReturn(0);
1380 }
1381 
1382 #if defined(PETSC_HAVE_PARMETIS)
1383 #include <parmetis.h>
1384 #endif
1385 
1386 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1387 {
1388 #if defined(PETSC_HAVE_PARMETIS)
1389   MPI_Comm       comm;
1390   PetscSection   section;
1391   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1392   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1393   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1394   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1395   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1396   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1397   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1398   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1399   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1400   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1401   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1402   PetscInt       options[5];               /* Options */
1403   /* Outputs */
1404   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1405   PetscInt      *assignment, *points;
1406   PetscMPIInt    rank, p, v, i;
1407   PetscErrorCode ierr;
1408 
1409   PetscFunctionBegin;
1410   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1411   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1412   options[0] = 0; /* Use all defaults */
1413   /* Calculate vertex distribution */
1414   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1415   vtxdist[0] = 0;
1416   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1417   for (p = 2; p <= nparts; ++p) {
1418     vtxdist[p] += vtxdist[p-1];
1419   }
1420   /* Calculate weights */
1421   for (p = 0; p < nparts; ++p) {
1422     tpwgts[p] = 1.0/nparts;
1423   }
1424   ubvec[0] = 1.05;
1425   /* Weight cells by dofs on cell by default */
1426   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1427   if (section) {
1428     PetscInt cStart, cEnd, dof;
1429 
1430     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1431     for (v = cStart; v < cEnd; ++v) {
1432       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1433       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1434       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1435       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1436     }
1437   } else {
1438     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1439   }
1440 
1441   if (nparts == 1) {
1442     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1443   } else {
1444     if (vtxdist[1] == vtxdist[nparts]) {
1445       if (!rank) {
1446         PetscStackPush("METIS_PartGraphKway");
1447         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1448         PetscStackPop;
1449         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1450       }
1451     } else {
1452       PetscStackPush("ParMETIS_V3_PartKway");
1453       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1454       PetscStackPop;
1455       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1456     }
1457   }
1458   /* Convert to PetscSection+IS */
1459   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1460   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1461   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1462   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1463   for (p = 0, i = 0; p < nparts; ++p) {
1464     for (v = 0; v < nvtxs; ++v) {
1465       if (assignment[v] == p) points[i++] = v;
1466     }
1467   }
1468   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1469   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1470   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1471   PetscFunctionReturn(0);
1472 #else
1473   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1474 #endif
1475 }
1476 
1477 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1478 {
1479   PetscFunctionBegin;
1480   part->ops->view      = PetscPartitionerView_ParMetis;
1481   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1482   part->ops->partition = PetscPartitionerPartition_ParMetis;
1483   PetscFunctionReturn(0);
1484 }
1485 
1486 /*MC
1487   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1488 
1489   Level: intermediate
1490 
1491 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1492 M*/
1493 
1494 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1495 {
1496   PetscPartitioner_ParMetis *p;
1497   PetscErrorCode          ierr;
1498 
1499   PetscFunctionBegin;
1500   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1501   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1502   part->data = p;
1503 
1504   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1505   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1506   PetscFunctionReturn(0);
1507 }
1508 
1509 /*@
1510   DMPlexGetPartitioner - Get the mesh partitioner
1511 
1512   Not collective
1513 
1514   Input Parameter:
1515 . dm - The DM
1516 
1517   Output Parameter:
1518 . part - The PetscPartitioner
1519 
1520   Level: developer
1521 
1522   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1523 
1524 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1525 @*/
1526 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1527 {
1528   DM_Plex       *mesh = (DM_Plex *) dm->data;
1529   PetscErrorCode ierr;
1530 
1531   PetscFunctionBegin;
1532   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1533   PetscValidPointer(part, 2);
1534   ierr = PetscPartitionerSetTypeFromOptions_Internal(mesh->partitioner);CHKERRQ(ierr);
1535   *part = mesh->partitioner;
1536   PetscFunctionReturn(0);
1537 }
1538 
1539 /*@
1540   DMPlexSetPartitioner - Set the mesh partitioner
1541 
  Logically collective on dm and part
1543 
1544   Input Parameters:
1545 + dm - The DM
1546 - part - The partitioner
1547 
1548   Level: developer
1549 
1550   Note: Any existing PetscPartitioner will be destroyed.
1551 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1553 @*/
1554 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1555 {
1556   DM_Plex       *mesh = (DM_Plex *) dm->data;
1557   PetscErrorCode ierr;
1558 
1559   PetscFunctionBegin;
1560   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1561   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1562   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1563   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1564   mesh->partitioner = part;
1565   PetscFunctionReturn(0);
1566 }
1567 
1568 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1569 {
1570   PetscErrorCode ierr;
1571 
1572   PetscFunctionBegin;
1573   if (up) {
1574     PetscInt parent;
1575 
1576     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1577     if (parent != point) {
1578       PetscInt closureSize, *closure = NULL, i;
1579 
1580       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1581       for (i = 0; i < closureSize; i++) {
1582         PetscInt cpoint = closure[2*i];
1583 
1584         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1585         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1586       }
1587       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1588     }
1589   }
1590   if (down) {
1591     PetscInt numChildren;
1592     const PetscInt *children;
1593 
1594     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1595     if (numChildren) {
1596       PetscInt i;
1597 
1598       for (i = 0; i < numChildren; i++) {
1599         PetscInt cpoint = children[i];
1600 
1601         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1602         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1603       }
1604     }
1605   }
1606   PetscFunctionReturn(0);
1607 }
1608 
1609 /*@
1610   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1611 
1612   Input Parameters:
1613 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1615 
1616   Level: developer
1617 
1618 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1619 @*/
1620 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1621 {
1622   IS              rankIS,   pointIS;
1623   const PetscInt *ranks,   *points;
1624   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1625   PetscInt       *closure = NULL;
1626   PetscErrorCode  ierr;
1627 
1628   PetscFunctionBegin;
1629   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1630   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1631   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1632   for (r = 0; r < numRanks; ++r) {
1633     const PetscInt rank = ranks[r];
1634 
1635     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1636     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1637     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1638     for (p = 0; p < numPoints; ++p) {
1639       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1640       for (c = 0; c < closureSize*2; c += 2) {
1641         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1642         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1643       }
1644     }
1645     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1646     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1647   }
1648   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1649   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1650   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1651   PetscFunctionReturn(0);
1652 }
1653 
1654 /*@
1655   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1656 
1657   Input Parameters:
1658 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1660 
1661   Level: developer
1662 
1663 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1664 @*/
1665 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1666 {
1667   IS              rankIS,   pointIS;
1668   const PetscInt *ranks,   *points;
1669   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1670   PetscInt       *adj = NULL;
1671   PetscErrorCode  ierr;
1672 
1673   PetscFunctionBegin;
1674   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1675   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1676   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1677   for (r = 0; r < numRanks; ++r) {
1678     const PetscInt rank = ranks[r];
1679 
1680     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1681     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1682     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1683     for (p = 0; p < numPoints; ++p) {
1684       adjSize = PETSC_DETERMINE;
1685       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1686       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1687     }
1688     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1689     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1690   }
1691   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1692   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1693   ierr = PetscFree(adj);CHKERRQ(ierr);
1694   PetscFunctionReturn(0);
1695 }
1696 
1697 /*@
1698   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1699 
1700   Input Parameters:
1701 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1703 
1704   Level: developer
1705 
1706   Note: This is required when generating multi-level overlaps to capture
1707   overlap points from non-neighbouring partitions.
1708 
1709 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1710 @*/
1711 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1712 {
1713   MPI_Comm        comm;
1714   PetscMPIInt     rank;
1715   PetscSF         sfPoint;
1716   DMLabel         lblRoots, lblLeaves;
1717   IS              rankIS, pointIS;
1718   const PetscInt *ranks;
1719   PetscInt        numRanks, r;
1720   PetscErrorCode  ierr;
1721 
1722   PetscFunctionBegin;
1723   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1724   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1725   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1726   /* Pull point contributions from remote leaves into local roots */
1727   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1728   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1729   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1730   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1731   for (r = 0; r < numRanks; ++r) {
1732     const PetscInt remoteRank = ranks[r];
1733     if (remoteRank == rank) continue;
1734     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1735     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1736     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1737   }
1738   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1739   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1740   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1741   /* Push point contributions from roots into remote leaves */
1742   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1743   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1744   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1745   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1746   for (r = 0; r < numRanks; ++r) {
1747     const PetscInt remoteRank = ranks[r];
1748     if (remoteRank == rank) continue;
1749     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1750     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1751     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1752   }
1753   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1754   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1755   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1756   PetscFunctionReturn(0);
1757 }
1758 
1759 /*@
1760   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1761 
1762   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1769 
1770   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1771   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1772 
1773   Level: developer
1774 
1775 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1776 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners: build a section over target
     ranks [0, size), with one dof per point sent to that rank */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints with the (index, rank) of the true owner of each labeled point:
     points found among the SF leaves use their remote owner, all others are local */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local == NULL means leaves are contiguous, so the binary search is skipped */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: ship each (point, rank) pair to its target process */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Record every received (point, owner-rank) pair in the leaf label */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1846 
1847 /*@
1848   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1849 
1850   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1856 
1857   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1858 
1859   Level: developer
1860 
1861 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1862 @*/
1863 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1864 {
1865   PetscMPIInt     rank, size;
1866   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1867   PetscSFNode    *remotePoints;
1868   IS              remoteRootIS;
1869   const PetscInt *remoteRoots;
1870   PetscErrorCode ierr;
1871 
1872   PetscFunctionBegin;
1873   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1874   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1875 
1876   for (numRemote = 0, n = 0; n < size; ++n) {
1877     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1878     numRemote += numPoints;
1879   }
1880   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1881   /* Put owned points first */
1882   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1883   if (numPoints > 0) {
1884     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1885     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1886     for (p = 0; p < numPoints; p++) {
1887       remotePoints[idx].index = remoteRoots[p];
1888       remotePoints[idx].rank = rank;
1889       idx++;
1890     }
1891     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1892     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1893   }
1894   /* Now add remote points */
1895   for (n = 0; n < size; ++n) {
1896     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1897     if (numPoints <= 0 || n == rank) continue;
1898     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1899     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1900     for (p = 0; p < numPoints; p++) {
1901       remotePoints[idx].index = remoteRoots[p];
1902       remotePoints[idx].rank = n;
1903       idx++;
1904     }
1905     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1906     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1907   }
1908   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1909   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1910   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1911   PetscFunctionReturn(0);
1912 }
1913