xref: /petsc/src/dm/impls/plex/plexpartition.c (revision a0058e54d981432273891bb2a2a873273067bde0)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 PetscClassId PETSCPARTITIONER_CLASSID = 0;
4 
5 PetscFunctionList PetscPartitionerList              = NULL;
6 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
20 PetscBool ParMetisPartitionercite = PETSC_FALSE;
21 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22                                "  author  = {George Karypis and Vipin Kumar},\n"
23                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24                                "  journal = {Journal of Parallel and Distributed Computing},\n"
25                                "  volume  = {48},\n"
26                                "  pages   = {71--85},\n"
27                                "  year    = {1998}\n}\n";
28 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 indicates the point SF is set up, i.e. the mesh is distributed */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph */
  /* Save the user's adjacency flags so they can be restored after the traversal below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference; this routine still destroys its reference below */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Only record neighbors in the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the user's adjacency definition */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr; /* NOTE(review): this size shadows the outer CSR size variable */
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers: negative entries encode ghosts as -(global+1) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* vOffsets[*numVertices] is the total number of adjacency entries being translated in place */
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
334 /*@C
335   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
336 
337   Not Collective
338 
339   Input Parameters:
340 + name        - The name of a new user-defined creation routine
341 - create_func - The creation routine itself
342 
343   Notes:
344   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
345 
346   Sample usage:
347 .vb
348     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349 .ve
350 
351   Then, your PetscPartitioner type can be chosen with the procedural interface via
352 .vb
353     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354     PetscPartitionerSetType(PetscPartitioner, "my_part");
355 .ve
356    or at runtime via the option
357 .vb
358     -petscpartitioner_type my_part
359 .ve
360 
361   Level: advanced
362 
363 .keywords: PetscPartitioner, register
364 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
365 
366 @*/
367 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368 {
369   PetscErrorCode ierr;
370 
371   PetscFunctionBegin;
372   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
419 
420   Not Collective
421 
422   Input Parameter:
423 . part - The PetscPartitioner
424 
425   Output Parameter:
426 . name - The PetscPartitioner type name
427 
428   Level: intermediate
429 
430 .keywords: PetscPartitioner, get, type, name
431 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432 @*/
433 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434 {
435   PetscErrorCode ierr;
436 
437   PetscFunctionBegin;
438   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
439   PetscValidPointer(name, 2);
440   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
441   *name = ((PetscObject) part)->type_name;
442   PetscFunctionReturn(0);
443 }
444 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the communicator's stdout ASCII viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Viewing is optional: implementations without a view hook print nothing */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470 {
471   PetscFunctionBegin;
472   if (!currentType) {
473 #if defined(PETSC_HAVE_CHACO)
474     *defaultType = PETSCPARTITIONERCHACO;
475 #elif defined(PETSC_HAVE_PARMETIS)
476     *defaultType = PETSCPARTITIONERPARMETIS;
477 #else
478     *defaultType = PETSCPARTITIONERSIMPLE;
479 #endif
480   } else {
481     *defaultType = currentType;
482   }
483   PetscFunctionReturn(0);
484 }
485 
/* Select and set the partitioner type from the options database.
   NOTE: PetscObjectOptionsBegin() declares the hidden PetscOptionsObject variable
   used by PetscOptionsFList(); the Begin/End pair must bracket all option queries. */
PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default is the current type if set, otherwise a build-dependent choice */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No option given and no type set yet: fall back to the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
508 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions hook below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin() declares the hidden PetscOptionsObject used below;
     the Begin/End pair must bracket all option queries */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
537 
538 /*@C
539   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
540 
541   Collective on PetscPartitioner
542 
543   Input Parameter:
544 . part - the PetscPartitioner object to setup
545 
546   Level: developer
547 
548 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
549 @*/
550 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
551 {
552   PetscErrorCode ierr;
553 
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
556   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
557   PetscFunctionReturn(0);
558 }
559 
560 /*@
561   PetscPartitionerDestroy - Destroys a PetscPartitioner object
562 
563   Collective on PetscPartitioner
564 
565   Input Parameter:
566 . part - the PetscPartitioner object to destroy
567 
568   Level: developer
569 
570 .seealso: PetscPartitionerView()
571 @*/
572 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
573 {
574   PetscErrorCode ierr;
575 
576   PetscFunctionBegin;
577   if (!*part) PetscFunctionReturn(0);
578   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
579 
580   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
581   ((PetscObject) (*part))->refct = 0;
582 
583   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
584   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
585   PetscFunctionReturn(0);
586 }
587 
588 /*@
589   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
590 
591   Collective on MPI_Comm
592 
593   Input Parameter:
594 . comm - The communicator for the PetscPartitioner object
595 
596   Output Parameter:
597 . part - The PetscPartitioner object
598 
599   Level: beginner
600 
601 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
602 @*/
603 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
604 {
605   PetscPartitioner p;
606   const char       *partitionerType = NULL;
607   PetscErrorCode   ierr;
608 
609   PetscFunctionBegin;
610   PetscValidPointer(part, 2);
611   *part = NULL;
612   ierr = DMInitializePackage();CHKERRQ(ierr);
613 
614   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
615   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
616   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
617 
618   *part = p;
619   PetscFunctionReturn(0);
620 }
621 
622 /*@
623   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
624 
625   Collective on DM
626 
627   Input Parameters:
628 + part    - The PetscPartitioner
629 - dm      - The mesh DM
630 
631   Output Parameters:
632 + partSection     - The PetscSection giving the division of points by partition
633 - partition       - The list of points by partition
634 
635   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
636 
637   Level: developer
638 
639 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
640 @*/
641 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
642 {
643   PetscMPIInt    size;
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
648   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
649   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
650   PetscValidPointer(partition, 5);
651   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
652   if (size == 1) {
653     PetscInt *points;
654     PetscInt  cStart, cEnd, c;
655 
656     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
657     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
658     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
659     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
660     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
661     for (c = cStart; c < cEnd; ++c) points[c] = c;
662     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
663     PetscFunctionReturn(0);
664   }
665   if (part->height == 0) {
666     PetscInt numVertices;
667     PetscInt *start     = NULL;
668     PetscInt *adjacency = NULL;
669     IS       globalNumbering;
670 
671     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
672     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
673     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
674     ierr = PetscFree(start);CHKERRQ(ierr);
675     ierr = PetscFree(adjacency);CHKERRQ(ierr);
676     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
677       const PetscInt *globalNum;
678       const PetscInt *partIdx;
679       PetscInt *map, cStart, cEnd;
680       PetscInt *adjusted, i, localSize, offset;
681       IS    newPartition;
682 
683       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
684       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
685       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
686       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
687       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
688       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
689       for (i = cStart, offset = 0; i < cEnd; i++) {
690         if (globalNum[i - cStart] >= 0) map[offset++] = i;
691       }
692       for (i = 0; i < localSize; i++) {
693         adjusted[i] = map[partIdx[i]];
694       }
695       ierr = PetscFree(map);CHKERRQ(ierr);
696       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
697       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
698       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
699       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
700       ierr = ISDestroy(partition);CHKERRQ(ierr);
701       *partition = newPartition;
702     }
703   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
704   PetscFunctionReturn(0);
705 
706 }
707 
708 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
709 {
710   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
711   PetscErrorCode          ierr;
712 
713   PetscFunctionBegin;
714   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
715   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
716   ierr = PetscFree(p);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
719 
720 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
721 {
722   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
723   PetscViewerFormat       format;
724   PetscErrorCode          ierr;
725 
726   PetscFunctionBegin;
727   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
728   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
729   if (p->random) {
730     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
731     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
732     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
733   }
734   PetscFunctionReturn(0);
735 }
736 
737 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
738 {
739   PetscBool      iascii;
740   PetscErrorCode ierr;
741 
742   PetscFunctionBegin;
743   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
744   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
745   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
746   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
747   PetscFunctionReturn(0);
748 }
749 
/* Process options for the shell partitioner (currently only the random-partition flag).
   NOTE: PetscOptionsHead()/PetscOptionsTail() must bracket the option queries. */
static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
761 
762 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
763 {
764   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
765   PetscInt                np;
766   PetscErrorCode          ierr;
767 
768   PetscFunctionBegin;
769   if (p->random) {
770     PetscRandom r;
771     PetscInt   *sizes, *points, v, p;
772     PetscMPIInt rank;
773 
774     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
775     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
776     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
777     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
778     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
779     for (v = 0; v < numVertices; ++v) {points[v] = v;}
780     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
781     for (v = numVertices-1; v > 0; --v) {
782       PetscReal val;
783       PetscInt  w, tmp;
784 
785       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
786       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
787       w    = PetscFloorReal(val);
788       tmp       = points[v];
789       points[v] = points[w];
790       points[w] = tmp;
791     }
792     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
793     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
794     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
795   }
796   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
797   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
798   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
799   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
800   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
801   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
802   *partition = p->partition;
803   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
804   PetscFunctionReturn(0);
805 }
806 
/* Install the function table for the Shell partitioner type. */
static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view           = PetscPartitionerView_Shell;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
  part->ops->destroy        = PetscPartitionerDestroy_Shell;
  part->ops->partition      = PetscPartitionerPartition_Shell;
  PetscFunctionReturn(0);
}
816 
817 /*MC
818   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
819 
820   Level: intermediate
821 
822 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
823 M*/
824 
825 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
826 {
827   PetscPartitioner_Shell *p;
828   PetscErrorCode          ierr;
829 
830   PetscFunctionBegin;
831   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
832   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
833   part->data = p;
834 
835   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
836   p->random = PETSC_FALSE;
837   PetscFunctionReturn(0);
838 }
839 
840 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
842 
843   Collective on Part
844 
845   Input Parameters:
846 + part     - The PetscPartitioner
847 . size - The number of partitions
848 . sizes    - array of size size (or NULL) providing the number of points in each partition
849 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
850 
851   Level: developer
852 
853   Notes:
854 
855     It is safe to free the sizes and points arrays after use in this routine.
856 
857 .seealso DMPlexDistribute(), PetscPartitionerCreate()
858 @*/
859 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
860 {
861   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
862   PetscInt                proc, numPoints;
863   PetscErrorCode          ierr;
864 
865   PetscFunctionBegin;
866   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
867   if (sizes)  {PetscValidPointer(sizes, 3);}
868   if (points) {PetscValidPointer(points, 4);}
869   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
870   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
871   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
872   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
873   if (sizes) {
874     for (proc = 0; proc < size; ++proc) {
875       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
876     }
877   }
878   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
879   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
880   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
881   PetscFunctionReturn(0);
882 }
883 
884 /*@
885   PetscPartitionerShellSetRandom - Set the flag to use a random partition
886 
887   Collective on Part
888 
889   Input Parameters:
890 + part   - The PetscPartitioner
891 - random - The flag to use a random partition
892 
893   Level: intermediate
894 
895 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
896 @*/
897 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
898 {
899   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
900 
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
903   p->random = random;
904   PetscFunctionReturn(0);
905 }
906 
907 /*@
908   PetscPartitionerShellGetRandom - get the flag to use a random partition
909 
910   Collective on Part
911 
912   Input Parameter:
913 . part   - The PetscPartitioner
914 
  Output Parameter:
916 . random - The flag to use a random partition
917 
918   Level: intermediate
919 
920 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
921 @*/
922 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
923 {
924   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
925 
926   PetscFunctionBegin;
927   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
928   PetscValidPointer(random, 2);
929   *random = p->random;
930   PetscFunctionReturn(0);
931 }
932 
933 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
934 {
935   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
936   PetscErrorCode          ierr;
937 
938   PetscFunctionBegin;
939   ierr = PetscFree(p);CHKERRQ(ierr);
940   PetscFunctionReturn(0);
941 }
942 
943 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
944 {
945   PetscViewerFormat format;
946   PetscErrorCode    ierr;
947 
948   PetscFunctionBegin;
949   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
950   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
955 {
956   PetscBool      iascii;
957   PetscErrorCode ierr;
958 
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
961   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
962   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
963   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
964   PetscFunctionReturn(0);
965 }
966 
/* Partition by cutting the global vertex numbering into nparts nearly-equal
   contiguous chunks, ignoring the graph (start/adjacency are unused).

   Each process owns a contiguous range [myFirst, myLast] of the global
   numbering and reports, in partSection, how many of its local vertices fall
   into each partition's chunk. The IS is the identity permutation since local
   points are already grouped by partition. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: the first (numVertices % nparts) partitions get one extra vertex */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the per-process vertex counts to get each rank's global range */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Partitions come in two sizes: the first rem partitions have pBig
         vertices, the rest pSmall. Locate which partitions overlap my range. */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Index assuming all partitions were big; correct if we land past the big ones */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* All partitions have the same size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Count how many of my vertices fall inside each overlapping partition */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1040 
/* Install the function table for the Simple partitioner type (no setfromoptions: it has no options). */
static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
1049 
1050 /*MC
1051   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1052 
1053   Level: intermediate
1054 
1055 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1056 M*/
1057 
1058 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1059 {
1060   PetscPartitioner_Simple *p;
1061   PetscErrorCode           ierr;
1062 
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1065   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1066   part->data = p;
1067 
1068   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1073 {
1074   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1075   PetscErrorCode          ierr;
1076 
1077   PetscFunctionBegin;
1078   ierr = PetscFree(p);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1083 {
1084   PetscViewerFormat format;
1085   PetscErrorCode    ierr;
1086 
1087   PetscFunctionBegin;
1088   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1089   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1090   PetscFunctionReturn(0);
1091 }
1092 
1093 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1094 {
1095   PetscBool      iascii;
1096   PetscErrorCode ierr;
1097 
1098   PetscFunctionBegin;
1099   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1100   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1101   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1102   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1103   PetscFunctionReturn(0);
1104 }
1105 
1106 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1107 {
1108   PetscInt       np;
1109   PetscErrorCode ierr;
1110 
1111   PetscFunctionBegin;
1112   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1113   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1114   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1115   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1116   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1117   PetscFunctionReturn(0);
1118 }
1119 
/* Install the function table for the Gather partitioner type (no setfromoptions: it has no options). */
static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1128 
1129 /*MC
1130   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1131 
1132   Level: intermediate
1133 
1134 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1135 M*/
1136 
1137 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1138 {
1139   PetscPartitioner_Gather *p;
1140   PetscErrorCode           ierr;
1141 
1142   PetscFunctionBegin;
1143   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1144   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1145   part->data = p;
1146 
1147   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1148   PetscFunctionReturn(0);
1149 }
1150 
1151 
1152 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1153 {
1154   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1155   PetscErrorCode          ierr;
1156 
1157   PetscFunctionBegin;
1158   ierr = PetscFree(p);CHKERRQ(ierr);
1159   PetscFunctionReturn(0);
1160 }
1161 
1162 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1163 {
1164   PetscViewerFormat format;
1165   PetscErrorCode    ierr;
1166 
1167   PetscFunctionBegin;
1168   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1169   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1170   PetscFunctionReturn(0);
1171 }
1172 
1173 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1174 {
1175   PetscBool      iascii;
1176   PetscErrorCode ierr;
1177 
1178   PetscFunctionBegin;
1179   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1180   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1181   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1182   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1183   PetscFunctionReturn(0);
1184 }
1185 
1186 #if defined(PETSC_HAVE_CHACO)
1187 #if defined(PETSC_HAVE_UNISTD_H)
1188 #include <unistd.h>
1189 #endif
1190 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1191 #include <chaco.h>
1192 #else
1193 /* Older versions of Chaco do not have an include file */
1194 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1195                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1196                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1197                        int mesh_dims[3], double *goal, int global_method, int local_method,
1198                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1199 #endif
1200 extern int FREE_GRAPH;
1201 #endif
1202 
/* Partition the local graph with the Chaco library.

   The CSR graph (start/adjacency) is shifted to 1-based numbering in place for
   Chaco and shifted back before returning. Chaco prints to stdout, so stdout is
   temporarily redirected into a pipe and the captured text is reported only on
   error. The result is converted into a partSection (points per partition) and
   an IS listing the points grouped by partition. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: return an empty partition without calling Chaco */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; undone at the end of this function */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D processor mesh with nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* Save stdout and point fd 1 at the pipe's write end */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
  /* NOTE(review): when PETSC_HAVE_UNISTD_H is not defined, the interface() return
     code in ierr is never checked -- confirm whether that is intentional */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and collect whatever Chaco printed (capped at the buffer size) */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertex numbers by their assigned partition */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1310 
/* Install the function table for the Chaco partitioner type (no setfromoptions: it has no options). */
static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}
1319 
1320 /*MC
1321   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1322 
1323   Level: intermediate
1324 
1325 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1326 M*/
1327 
1328 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1329 {
1330   PetscPartitioner_Chaco *p;
1331   PetscErrorCode          ierr;
1332 
1333   PetscFunctionBegin;
1334   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1335   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1336   part->data = p;
1337 
1338   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1339   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1340   PetscFunctionReturn(0);
1341 }
1342 
1343 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1344 {
1345   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1346   PetscErrorCode             ierr;
1347 
1348   PetscFunctionBegin;
1349   ierr = PetscFree(p);CHKERRQ(ierr);
1350   PetscFunctionReturn(0);
1351 }
1352 
1353 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1354 {
1355   PetscViewerFormat format;
1356   PetscErrorCode    ierr;
1357 
1358   PetscFunctionBegin;
1359   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1360   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1361   PetscFunctionReturn(0);
1362 }
1363 
1364 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1365 {
1366   PetscBool      iascii;
1367   PetscErrorCode ierr;
1368 
1369   PetscFunctionBegin;
1370   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1371   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1372   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1373   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 #if defined(PETSC_HAVE_PARMETIS)
1378 #include <parmetis.h>
1379 #endif
1380 
1381 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1382 {
1383 #if defined(PETSC_HAVE_PARMETIS)
1384   MPI_Comm       comm;
1385   PetscSection   section;
1386   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1387   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1388   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1389   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1390   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1391   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1392   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1393   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1394   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1395   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1396   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1397   PetscInt       options[5];               /* Options */
1398   /* Outputs */
1399   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1400   PetscInt      *assignment, *points;
1401   PetscMPIInt    rank, p, v, i;
1402   PetscErrorCode ierr;
1403 
1404   PetscFunctionBegin;
1405   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1406   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1407   options[0] = 0; /* Use all defaults */
1408   /* Calculate vertex distribution */
1409   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1410   vtxdist[0] = 0;
1411   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1412   for (p = 2; p <= nparts; ++p) {
1413     vtxdist[p] += vtxdist[p-1];
1414   }
1415   /* Calculate weights */
1416   for (p = 0; p < nparts; ++p) {
1417     tpwgts[p] = 1.0/nparts;
1418   }
1419   ubvec[0] = 1.05;
1420   /* Weight cells by dofs on cell by default */
1421   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1422   if (section) {
1423     PetscInt cStart, cEnd, dof;
1424 
1425     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1426     for (v = cStart; v < cEnd; ++v) {
1427       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1428       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1429       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1430       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1431     }
1432   } else {
1433     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1434   }
1435 
1436   if (nparts == 1) {
1437     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1438   } else {
1439     if (vtxdist[1] == vtxdist[nparts]) {
1440       if (!rank) {
1441         PetscStackPush("METIS_PartGraphKway");
1442         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1443         PetscStackPop;
1444         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1445       }
1446     } else {
1447       PetscStackPush("ParMETIS_V3_PartKway");
1448       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1449       PetscStackPop;
1450       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1451     }
1452   }
1453   /* Convert to PetscSection+IS */
1454   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1455   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1456   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1457   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1458   for (p = 0, i = 0; p < nparts; ++p) {
1459     for (v = 0; v < nvtxs; ++v) {
1460       if (assignment[v] == p) points[i++] = v;
1461     }
1462   }
1463   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1464   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1465   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1466   PetscFunctionReturn(0);
1467 #else
1468   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1469 #endif
1470 }
1471 
/* Install the function table for the ParMetis partitioner type (no setfromoptions: it has no options). */
static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_ParMetis;
  part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
  part->ops->partition = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}
1480 
1481 /*MC
1482   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1483 
1484   Level: intermediate
1485 
1486 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1487 M*/
1488 
1489 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1490 {
1491   PetscPartitioner_ParMetis *p;
1492   PetscErrorCode          ierr;
1493 
1494   PetscFunctionBegin;
1495   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1496   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1497   part->data = p;
1498 
1499   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1500   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1501   PetscFunctionReturn(0);
1502 }
1503 
1504 /*@
1505   DMPlexGetPartitioner - Get the mesh partitioner
1506 
1507   Not collective
1508 
1509   Input Parameter:
1510 . dm - The DM
1511 
1512   Output Parameter:
1513 . part - The PetscPartitioner
1514 
1515   Level: developer
1516 
1517   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1518 
1519 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1520 @*/
1521 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1522 {
1523   DM_Plex       *mesh = (DM_Plex *) dm->data;
1524 
1525   PetscFunctionBegin;
1526   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1527   PetscValidPointer(part, 2);
1528   *part = mesh->partitioner;
1529   PetscFunctionReturn(0);
1530 }
1531 
1532 /*@
1533   DMPlexSetPartitioner - Set the mesh partitioner
1534 
1535   logically collective on dm and part
1536 
1537   Input Parameters:
1538 + dm - The DM
1539 - part - The partitioner
1540 
1541   Level: developer
1542 
1543   Note: Any existing PetscPartitioner will be destroyed.
1544 
1545 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1546 @*/
1547 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1548 {
1549   DM_Plex       *mesh = (DM_Plex *) dm->data;
1550   PetscErrorCode ierr;
1551 
1552   PetscFunctionBegin;
1553   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1554   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1555   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1556   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1557   mesh->partitioner = part;
1558   PetscFunctionReturn(0);
1559 }
1560 
/* Recursively label tree-related points of 'point' with value 'rank':
   if 'up', add the closure of the tree parent (and its ancestors) to 'label';
   if 'down', add all tree children (and their descendants).
   Used to keep partition labels consistent on meshes with hanging nodes. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* parent == point means 'point' has no tree parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure entries are (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse upward only; downward traversal is handled separately below */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Recurse downward only to reach all descendants of this child */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1601 
1602 /*@
1603   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1604 
1605   Input Parameters:
1606 + dm     - The DM
1607 - label  - DMLabel assinging ranks to remote roots
1608 
1609   Level: developer
1610 
1611 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1612 @*/
1613 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1614 {
1615   IS              rankIS,   pointIS;
1616   const PetscInt *ranks,   *points;
1617   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1618   PetscInt       *closure = NULL;
1619   PetscErrorCode  ierr;
1620 
1621   PetscFunctionBegin;
1622   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1623   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1624   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1625   for (r = 0; r < numRanks; ++r) {
1626     const PetscInt rank = ranks[r];
1627 
1628     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1629     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1630     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1631     for (p = 0; p < numPoints; ++p) {
1632       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1633       for (c = 0; c < closureSize*2; c += 2) {
1634         ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
1635         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
1636       }
1637     }
1638     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1639     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1640   }
1641   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
1642   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1643   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1644   PetscFunctionReturn(0);
1645 }
1646 
1647 /*@
1648   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1649 
1650   Input Parameters:
1651 + dm     - The DM
1652 - label  - DMLabel assinging ranks to remote roots
1653 
1654   Level: developer
1655 
1656 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1657 @*/
1658 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1659 {
1660   IS              rankIS,   pointIS;
1661   const PetscInt *ranks,   *points;
1662   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1663   PetscInt       *adj = NULL;
1664   PetscErrorCode  ierr;
1665 
1666   PetscFunctionBegin;
1667   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1668   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1669   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1670   for (r = 0; r < numRanks; ++r) {
1671     const PetscInt rank = ranks[r];
1672 
1673     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1674     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1675     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1676     for (p = 0; p < numPoints; ++p) {
1677       adjSize = PETSC_DETERMINE;
1678       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1679       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1680     }
1681     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1682     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1683   }
1684   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1685   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1686   ierr = PetscFree(adj);CHKERRQ(ierr);
1687   PetscFunctionReturn(0);
1688 }
1689 
1690 /*@
1691   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1692 
1693   Input Parameters:
1694 + dm     - The DM
1695 - label  - DMLabel assinging ranks to remote roots
1696 
1697   Level: developer
1698 
1699   Note: This is required when generating multi-level overlaps to capture
1700   overlap points from non-neighbouring partitions.
1701 
1702 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1703 @*/
1704 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1705 {
1706   MPI_Comm        comm;
1707   PetscMPIInt     rank;
1708   PetscSF         sfPoint;
1709   DMLabel         lblRoots, lblLeaves;
1710   IS              rankIS, pointIS;
1711   const PetscInt *ranks;
1712   PetscInt        numRanks, r;
1713   PetscErrorCode  ierr;
1714 
1715   PetscFunctionBegin;
1716   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1717   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1718   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1719   /* Pull point contributions from remote leaves into local roots */
1720   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1721   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1722   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1723   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1724   for (r = 0; r < numRanks; ++r) {
1725     const PetscInt remoteRank = ranks[r];
1726     if (remoteRank == rank) continue;
1727     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1728     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1729     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1730   }
1731   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1732   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1733   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1734   /* Push point contributions from roots into remote leaves */
1735   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1736   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1737   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1738   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1739   for (r = 0; r < numRanks; ++r) {
1740     const PetscInt remoteRank = ranks[r];
1741     if (remoteRank == rank) continue;
1742     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1743     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1744     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1745   }
1746   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1747   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1748   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1749   PetscFunctionReturn(0);
1750 }
1751 
/*@
  DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots

  Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
  resulting leafLabel is a receiver mapping of remote roots to their parent rank.

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per target rank, the number of points this rank sends */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p; /* shadows outer p intentionally */

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, its true owner is the remote
         root; otherwise this rank owns it.  'local' may be NULL (contiguous
         leaves), in which case no lookup is possible and ownership is local. */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1839 
1840 /*@
1841   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1842 
1843   Input Parameters:
1844 + dm    - The DM
1845 . label - DMLabel assinging ranks to remote roots
1846 
1847   Output Parameter:
1848 - sf    - The star forest communication context encapsulating the defined mapping
1849 
1850   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1851 
1852   Level: developer
1853 
1854 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1855 @*/
1856 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1857 {
1858   PetscMPIInt     rank, size;
1859   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1860   PetscSFNode    *remotePoints;
1861   IS              remoteRootIS;
1862   const PetscInt *remoteRoots;
1863   PetscErrorCode ierr;
1864 
1865   PetscFunctionBegin;
1866   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1867   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1868 
1869   for (numRemote = 0, n = 0; n < size; ++n) {
1870     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1871     numRemote += numPoints;
1872   }
1873   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1874   /* Put owned points first */
1875   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1876   if (numPoints > 0) {
1877     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1878     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1879     for (p = 0; p < numPoints; p++) {
1880       remotePoints[idx].index = remoteRoots[p];
1881       remotePoints[idx].rank = rank;
1882       idx++;
1883     }
1884     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1885     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1886   }
1887   /* Now add remote points */
1888   for (n = 0; n < size; ++n) {
1889     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1890     if (numPoints <= 0 || n == rank) continue;
1891     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1892     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1893     for (p = 0; p < numPoints; p++) {
1894       remotePoints[idx].index = remoteRoots[p];
1895       remotePoints[idx].rank = n;
1896       idx++;
1897     }
1898     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1899     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1900   }
1901   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1902   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1903   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1904   PetscFunctionReturn(0);
1905 }
1906