xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 13fef8f3aac1e4c8ade9384cb55ec528bb5c22d0)
#include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/

/* Class id for PetscPartitioner objects; 0 until assigned at package registration */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry mapping partitioner type names to creation routines, and the flag
   recording whether the built-in implementations have been registered yet */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* Set once the Chaco citation below has been registered -- presumably via
   PetscCitationsRegister() in the Chaco implementation; not visible in this file */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry for the Chaco partitioner. Every field line must end in "\n" so the
   citation prints as well-formed BibTeX; the booktitle line was missing its newline. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
19 
/* Set once the ParMetis citation below has been registered -- presumably via
   PetscCitationsRegister() in the ParMetis implementation; not visible in this file */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry for the ParMetis partitioner; each line is newline-terminated */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM dm
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices     - Number of vertices in the graph
. offsets         - Point offsets in the graph
. adjacency       - Point connectivity in the graph
- globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  The offsets and adjacency arrays are allocated here and ownership passes to the caller,
  who frees them with PetscFree().

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots > 0 means a point SF is set up, i.e. some local points may be remote copies */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's adjacency
     flags, force cone/no-closure, and restore the flags after the loop below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Hand the caller its own reference to the numbering */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only neighbors in the same stratum, excluding the point itself */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  /* NOTE(review): vOffsets/graph are only handed out when offsets/adjacency are
     non-NULL; a NULL output argument would leak them -- confirm callers always pass both */
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    /* NOTE(review): this 'size' shadows the function-scope 'size' used above */
    PetscInt n, size, *cells_arr;
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (negative encodes -(global+1) for ghosts) */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
    ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
134 
135 /*@C
136   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
137 
138   Collective
139 
140   Input Arguments:
141 + dm - The DMPlex
142 - cellHeight - The height of mesh points to treat as cells (default should be 0)
143 
144   Output Arguments:
145 + numVertices - The number of local vertices in the graph, or cells in the mesh.
146 . offsets     - The offset to the adjacency list for each cell
147 - adjacency   - The adjacency list for all cells
148 
149   Note: This is suitable for input to a mesh partitioner like ParMetis.
150 
151   Level: advanced
152 
153 .seealso: DMPlexCreate()
154 @*/
155 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156 {
157   const PetscInt maxFaceCases = 30;
158   PetscInt       numFaceCases = 0;
159   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
160   PetscInt      *off, *adj;
161   PetscInt      *neighborCells = NULL;
162   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBegin;
166   /* For parallel partitioning, I think you have to communicate supports */
167   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
168   cellDim = dim - cellHeight;
169   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
170   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
171   if (cEnd - cStart == 0) {
172     if (numVertices) *numVertices = 0;
173     if (offsets)   *offsets   = NULL;
174     if (adjacency) *adjacency = NULL;
175     PetscFunctionReturn(0);
176   }
177   numCells  = cEnd - cStart;
178   faceDepth = depth - cellHeight;
179   if (dim == depth) {
180     PetscInt f, fStart, fEnd;
181 
182     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
183     /* Count neighboring cells */
184     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
185     for (f = fStart; f < fEnd; ++f) {
186       const PetscInt *support;
187       PetscInt        supportSize;
188       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
189       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
190       if (supportSize == 2) {
191         PetscInt numChildren;
192 
193         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
194         if (!numChildren) {
195           ++off[support[0]-cStart+1];
196           ++off[support[1]-cStart+1];
197         }
198       }
199     }
200     /* Prefix sum */
201     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202     if (adjacency) {
203       PetscInt *tmp;
204 
205       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
206       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
207       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
208       /* Get neighboring cells */
209       for (f = fStart; f < fEnd; ++f) {
210         const PetscInt *support;
211         PetscInt        supportSize;
212         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
213         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
214         if (supportSize == 2) {
215           PetscInt numChildren;
216 
217           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
218           if (!numChildren) {
219             adj[tmp[support[0]-cStart]++] = support[1];
220             adj[tmp[support[1]-cStart]++] = support[0];
221           }
222         }
223       }
224 #if defined(PETSC_USE_DEBUG)
225       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
226 #endif
227       ierr = PetscFree(tmp);CHKERRQ(ierr);
228     }
229     if (numVertices) *numVertices = numCells;
230     if (offsets)   *offsets   = off;
231     if (adjacency) *adjacency = adj;
232     PetscFunctionReturn(0);
233   }
234   /* Setup face recognition */
235   if (faceDepth == 1) {
236     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
237 
238     for (c = cStart; c < cEnd; ++c) {
239       PetscInt corners;
240 
241       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
242       if (!cornersSeen[corners]) {
243         PetscInt nFV;
244 
245         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246         cornersSeen[corners] = 1;
247 
248         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
249 
250         numFaceVertices[numFaceCases++] = nFV;
251       }
252     }
253   }
254   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
255   /* Count neighboring cells */
256   for (cell = cStart; cell < cEnd; ++cell) {
257     PetscInt numNeighbors = PETSC_DETERMINE, n;
258 
259     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
260     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261     for (n = 0; n < numNeighbors; ++n) {
262       PetscInt        cellPair[2];
263       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264       PetscInt        meetSize = 0;
265       const PetscInt *meet    = NULL;
266 
267       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268       if (cellPair[0] == cellPair[1]) continue;
269       if (!found) {
270         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
271         if (meetSize) {
272           PetscInt f;
273 
274           for (f = 0; f < numFaceCases; ++f) {
275             if (numFaceVertices[f] == meetSize) {
276               found = PETSC_TRUE;
277               break;
278             }
279           }
280         }
281         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
282       }
283       if (found) ++off[cell-cStart+1];
284     }
285   }
286   /* Prefix sum */
287   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
288 
289   if (adjacency) {
290     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
291     /* Get neighboring cells */
292     for (cell = cStart; cell < cEnd; ++cell) {
293       PetscInt numNeighbors = PETSC_DETERMINE, n;
294       PetscInt cellOffset   = 0;
295 
296       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
297       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298       for (n = 0; n < numNeighbors; ++n) {
299         PetscInt        cellPair[2];
300         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301         PetscInt        meetSize = 0;
302         const PetscInt *meet    = NULL;
303 
304         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305         if (cellPair[0] == cellPair[1]) continue;
306         if (!found) {
307           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
308           if (meetSize) {
309             PetscInt f;
310 
311             for (f = 0; f < numFaceCases; ++f) {
312               if (numFaceVertices[f] == meetSize) {
313                 found = PETSC_TRUE;
314                 break;
315               }
316             }
317           }
318           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
319         }
320         if (found) {
321           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322           ++cellOffset;
323         }
324       }
325     }
326   }
327   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
328   if (numVertices) *numVertices = numCells;
329   if (offsets)   *offsets   = off;
330   if (adjacency) *adjacency = adj;
331   PetscFunctionReturn(0);
332 }
333 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Store the constructor in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
375 
376 /*@C
377   PetscPartitionerSetType - Builds a particular PetscPartitioner
378 
379   Collective on PetscPartitioner
380 
381   Input Parameters:
382 + part - The PetscPartitioner object
383 - name - The kind of partitioner
384 
385   Options Database Key:
386 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
387 
388   Level: intermediate
389 
390 .keywords: PetscPartitioner, set, type
391 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392 @*/
393 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394 {
395   PetscErrorCode (*r)(PetscPartitioner);
396   PetscBool      match;
397   PetscErrorCode ierr;
398 
399   PetscFunctionBegin;
400   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
401   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
402   if (match) PetscFunctionReturn(0);
403 
404   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
405   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
406   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
407 
408   if (part->ops->destroy) {
409     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
410     part->ops->destroy = NULL;
411   }
412   ierr = (*r)(part);CHKERRQ(ierr);
413   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Borrowed reference: the string is owned by the object and must not be freed by the caller */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
444 
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* A NULL viewer means the default ASCII viewer on the object's communicator */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Dispatch to the type-specific view routine, if the implementation provides one */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
468 
469 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
470 {
471   const char    *defaultType;
472   char           name[256];
473   PetscBool      flg;
474   PetscErrorCode ierr;
475 
476   PetscFunctionBegin;
477   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
478   if (!((PetscObject) part)->type_name)
479 #if defined(PETSC_HAVE_CHACO)
480     defaultType = PETSCPARTITIONERCHACO;
481 #elif defined(PETSC_HAVE_PARMETIS)
482     defaultType = PETSCPARTITIONERPARMETIS;
483 #else
484     defaultType = PETSCPARTITIONERSIMPLE;
485 #endif
486   else
487     defaultType = ((PetscObject) part)->type_name;
488   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
489 
490   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
491   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
492   if (flg) {
493     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
494   } else if (!((PetscObject) part)->type_name) {
495     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
496   }
497   ierr = PetscOptionsEnd();CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Resolve the type first so the type-specific setfromoptions hook below exists */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin is a macro that declares PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
529 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Setup is optional: only dispatch when the implementation provides it */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
551 
552 /*@
553   PetscPartitionerDestroy - Destroys a PetscPartitioner object
554 
555   Collective on PetscPartitioner
556 
557   Input Parameter:
558 . part - the PetscPartitioner object to destroy
559 
560   Level: developer
561 
562 .seealso: PetscPartitionerView()
563 @*/
564 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
565 {
566   PetscErrorCode ierr;
567 
568   PetscFunctionBegin;
569   if (!*part) PetscFunctionReturn(0);
570   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
571 
572   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
573   ((PetscObject) (*part))->refct = 0;
574 
575   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
576   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 /*@
581   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
582 
583   Collective on MPI_Comm
584 
585   Input Parameter:
586 . comm - The communicator for the PetscPartitioner object
587 
588   Output Parameter:
589 . part - The PetscPartitioner object
590 
591   Level: beginner
592 
593 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
594 @*/
595 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
596 {
597   PetscPartitioner p;
598   PetscErrorCode   ierr;
599 
600   PetscFunctionBegin;
601   PetscValidPointer(part, 2);
602   *part = NULL;
603   ierr = DMInitializePackage();CHKERRQ(ierr);
604 
605   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
606 
607   *part = p;
608   PetscFunctionReturn(0);
609 }
610 
611 /*@
612   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613 
614   Collective on DM
615 
616   Input Parameters:
617 + part    - The PetscPartitioner
618 - dm      - The mesh DM
619 
620   Output Parameters:
621 + partSection     - The PetscSection giving the division of points by partition
622 - partition       - The list of points by partition
623 
624   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625 
626   Level: developer
627 
628 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
629 @*/
630 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
631 {
632   PetscMPIInt    size;
633   PetscErrorCode ierr;
634 
635   PetscFunctionBegin;
636   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
637   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
638   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
639   PetscValidPointer(partition, 5);
640   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
641   if (size == 1) {
642     PetscInt *points;
643     PetscInt  cStart, cEnd, c;
644 
645     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
646     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
647     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
648     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
649     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
650     for (c = cStart; c < cEnd; ++c) points[c] = c;
651     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
652     PetscFunctionReturn(0);
653   }
654   /* Obvious cheating, but I cannot think of a better way to do this. The DMSetFromOptions() has refinement in it,
655      so we cannot call it and have it feed down to the partitioner before partitioning */
656   ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
657   if (part->height == 0) {
658     PetscInt numVertices;
659     PetscInt *start     = NULL;
660     PetscInt *adjacency = NULL;
661     IS       globalNumbering;
662 
663     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
664     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
665     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
666     ierr = PetscFree(start);CHKERRQ(ierr);
667     ierr = PetscFree(adjacency);CHKERRQ(ierr);
668     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
669       const PetscInt *globalNum;
670       const PetscInt *partIdx;
671       PetscInt *map, cStart, cEnd;
672       PetscInt *adjusted, i, localSize, offset;
673       IS    newPartition;
674 
675       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
676       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
677       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
678       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
679       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
680       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
681       for (i = cStart, offset = 0; i < cEnd; i++) {
682         if (globalNum[i - cStart] >= 0) map[offset++] = i;
683       }
684       for (i = 0; i < localSize; i++) {
685         adjusted[i] = map[partIdx[i]];
686       }
687       ierr = PetscFree(map);CHKERRQ(ierr);
688       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
689       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
690       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
691       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
692       ierr = ISDestroy(partition);CHKERRQ(ierr);
693       *partition = newPartition;
694     }
695   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
696   PetscFunctionReturn(0);
697 
698 }
699 
700 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
701 {
702   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
703   PetscErrorCode          ierr;
704 
705   PetscFunctionBegin;
706   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
707   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
708   ierr = PetscFree(p);CHKERRQ(ierr);
709   PetscFunctionReturn(0);
710 }
711 
712 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
713 {
714   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
715   PetscViewerFormat       format;
716   PetscErrorCode          ierr;
717 
718   PetscFunctionBegin;
719   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
720   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
721   if (p->random) {
722     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
723     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
724     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
725   }
726   PetscFunctionReturn(0);
727 }
728 
729 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
730 {
731   PetscBool      iascii;
732   PetscErrorCode ierr;
733 
734   PetscFunctionBegin;
735   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
736   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
737   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
738   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
739   PetscFunctionReturn(0);
740 }
741 
742 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
743 {
744   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
745   PetscErrorCode          ierr;
746 
747   PetscFunctionBegin;
748   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");CHKERRQ(ierr);
749   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
750   ierr = PetscOptionsTail();CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
754 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
755 {
756   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
757   PetscInt                np;
758   PetscErrorCode          ierr;
759 
760   PetscFunctionBegin;
761   if (p->random) {
762     PetscRandom r;
763     PetscInt   *sizes, *points, v;
764 
765     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
766     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
767     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
768     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
769     for (v = 0; v < numVertices; ++v) {
770       PetscReal val;
771       PetscInt  part;
772 
773       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
774       part = PetscFloorReal(val);
775       points[v] = part;
776       ++sizes[part];
777     }
778     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
779     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
780     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
781   }
782   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
783   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
784   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
785   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
786   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
787   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
788   *partition = p->partition;
789   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
790   PetscFunctionReturn(0);
791 }
792 
793 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
794 {
795   PetscFunctionBegin;
796   part->ops->view           = PetscPartitionerView_Shell;
797   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
798   part->ops->destroy        = PetscPartitionerDestroy_Shell;
799   part->ops->partition      = PetscPartitionerPartition_Shell;
800   PetscFunctionReturn(0);
801 }
802 
803 /*MC
804   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
805 
806   Level: intermediate
807 
808 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
809 M*/
810 
811 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
812 {
813   PetscPartitioner_Shell *p;
814   PetscErrorCode          ierr;
815 
816   PetscFunctionBegin;
817   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
818   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
819   part->data = p;
820 
821   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
822   p->random = PETSC_FALSE;
823   PetscFunctionReturn(0);
824 }
825 
826 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
828 
829   Collective on Part
830 
831   Input Parameters:
832 + part     - The PetscPartitioner
833 . size - The number of partitions
834 . sizes    - array of size size (or NULL) providing the number of points in each partition
835 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
836 
837   Level: developer
838 
839   Notes:
840 
841     It is safe to free the sizes and points arrays after use in this routine.
842 
843 .seealso DMPlexDistribute(), PetscPartitionerCreate()
844 @*/
845 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
846 {
847   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
848   PetscInt                proc, numPoints;
849   PetscErrorCode          ierr;
850 
851   PetscFunctionBegin;
852   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
853   if (sizes)  {PetscValidPointer(sizes, 3);}
854   if (points) {PetscValidPointer(points, 4);}
855   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
856   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
857   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
858   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
859   if (sizes) {
860     for (proc = 0; proc < size; ++proc) {
861       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
862     }
863   }
864   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
865   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
866   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
867   PetscFunctionReturn(0);
868 }
869 
870 /*@
871   PetscPartitionerShellSetRandom - Set the flag to use a random partition
872 
873   Collective on Part
874 
875   Input Parameters:
876 + part   - The PetscPartitioner
877 - random - The flag to use a random partition
878 
879   Level: intermediate
880 
881 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
882 @*/
883 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
884 {
885   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
886 
887   PetscFunctionBegin;
888   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
889   p->random = random;
890   PetscFunctionReturn(0);
891 }
892 
893 /*@
894   PetscPartitionerShellGetRandom - get the flag to use a random partition
895 
896   Collective on Part
897 
898   Input Parameter:
899 . part   - The PetscPartitioner
900 
  Output Parameter:
902 . random - The flag to use a random partition
903 
904   Level: intermediate
905 
906 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
907 @*/
908 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
909 {
910   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
911 
912   PetscFunctionBegin;
913   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
914   PetscValidPointer(random, 2);
915   *random = p->random;
916   PetscFunctionReturn(0);
917 }
918 
919 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
920 {
921   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
922   PetscErrorCode          ierr;
923 
924   PetscFunctionBegin;
925   ierr = PetscFree(p);CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
928 
929 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
930 {
931   PetscViewerFormat format;
932   PetscErrorCode    ierr;
933 
934   PetscFunctionBegin;
935   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
936   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
941 {
942   PetscBool      iascii;
943   PetscErrorCode ierr;
944 
945   PetscFunctionBegin;
946   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
947   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
948   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
949   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
950   PetscFunctionReturn(0);
951 }
952 
/* PetscPartitionerPartition_Simple - Partition by cutting the global vertex numbering
   into nparts contiguous chunks of nearly equal size (the first nvGlobal % nparts
   chunks get one extra vertex). Each rank keeps its local vertices in place (the
   output IS is the identity stride) and only computes how many of its local vertices
   fall in each chunk, recorded as dofs of partSection. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* Local vertices are not reordered: partition p gets a contiguous run of them */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: distribute the remainder over the first (numVertices % nparts) parts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the local vertex counts to get each rank's global offset */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* Global numbers of this rank's first and last local vertices (inclusive) */
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;      /* number of "big" parts holding one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;     /* size of a small part */
      PetscInt pBig = nvGlobal/nparts + 1;   /* size of a big part */


      /* Find the range [firstPart, lastPart] of chunks overlapping [myFirst, myLast].
         The first 'rem' chunks have size pBig, the remaining ones size pSmall. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst lies past the big parts: offset into the small parts */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Even division: every part has exactly nvGlobal/nparts vertices */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each overlapping chunk, clip its global range to our local range and
         record the overlap length; chunks outside [firstPart, lastPart] keep 0 dofs */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1026 
1027 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1028 {
1029   PetscFunctionBegin;
1030   part->ops->view      = PetscPartitionerView_Simple;
1031   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1032   part->ops->partition = PetscPartitionerPartition_Simple;
1033   PetscFunctionReturn(0);
1034 }
1035 
1036 /*MC
1037   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1038 
1039   Level: intermediate
1040 
1041 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1042 M*/
1043 
1044 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1045 {
1046   PetscPartitioner_Simple *p;
1047   PetscErrorCode           ierr;
1048 
1049   PetscFunctionBegin;
1050   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1051   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1052   part->data = p;
1053 
1054   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1055   PetscFunctionReturn(0);
1056 }
1057 
1058 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1059 {
1060   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1061   PetscErrorCode          ierr;
1062 
1063   PetscFunctionBegin;
1064   ierr = PetscFree(p);CHKERRQ(ierr);
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1069 {
1070   PetscViewerFormat format;
1071   PetscErrorCode    ierr;
1072 
1073   PetscFunctionBegin;
1074   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1075   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1076   PetscFunctionReturn(0);
1077 }
1078 
1079 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1080 {
1081   PetscBool      iascii;
1082   PetscErrorCode ierr;
1083 
1084   PetscFunctionBegin;
1085   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1086   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1087   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1088   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1089   PetscFunctionReturn(0);
1090 }
1091 
1092 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1093 {
1094   PetscInt       np;
1095   PetscErrorCode ierr;
1096 
1097   PetscFunctionBegin;
1098   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1099   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1100   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1101   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1102   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1103   PetscFunctionReturn(0);
1104 }
1105 
1106 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1107 {
1108   PetscFunctionBegin;
1109   part->ops->view      = PetscPartitionerView_Gather;
1110   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1111   part->ops->partition = PetscPartitionerPartition_Gather;
1112   PetscFunctionReturn(0);
1113 }
1114 
1115 /*MC
1116   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1117 
1118   Level: intermediate
1119 
1120 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1121 M*/
1122 
1123 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1124 {
1125   PetscPartitioner_Gather *p;
1126   PetscErrorCode           ierr;
1127 
1128   PetscFunctionBegin;
1129   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1130   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1131   part->data = p;
1132 
1133   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1134   PetscFunctionReturn(0);
1135 }
1136 
1137 
1138 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1139 {
1140   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1141   PetscErrorCode          ierr;
1142 
1143   PetscFunctionBegin;
1144   ierr = PetscFree(p);CHKERRQ(ierr);
1145   PetscFunctionReturn(0);
1146 }
1147 
1148 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1149 {
1150   PetscViewerFormat format;
1151   PetscErrorCode    ierr;
1152 
1153   PetscFunctionBegin;
1154   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1155   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1156   PetscFunctionReturn(0);
1157 }
1158 
1159 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1160 {
1161   PetscBool      iascii;
1162   PetscErrorCode ierr;
1163 
1164   PetscFunctionBegin;
1165   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1166   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1167   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1168   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1169   PetscFunctionReturn(0);
1170 }
1171 
1172 #if defined(PETSC_HAVE_CHACO)
1173 #if defined(PETSC_HAVE_UNISTD_H)
1174 #include <unistd.h>
1175 #endif
1176 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1177 #include <chaco.h>
1178 #else
1179 /* Older versions of Chaco do not have an include file */
1180 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1181                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1182                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1183                        int mesh_dims[3], double *goal, int global_method, int local_method,
1184                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1185 #endif
1186 extern int FREE_GRAPH;
1187 #endif
1188 
/* PetscPartitionerPartition_Chaco - Partition the local graph using the Chaco
   library's interface() entry point, then convert the resulting per-vertex
   assignment into a PetscSection (counts per partition) plus an IS (vertices
   grouped by partition). Chaco prints to stdout, so stdout is temporarily
   redirected into a pipe and replayed only when Chaco reports an error. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Nothing to partition locally: return an empty (but set-up) section and IS */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based adjacency; shift in place and undo it at the end */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);          /* save the real stdout so it can be restored */
    close(1);
    dup2(fd_pipe[1], 1);         /* stdout now feeds the pipe's write end */
  }
#endif
  /* NOTE(review): interface() returns an int status; it is only checked below
     inside the PETSC_HAVE_UNISTD_H block — verify whether the no-unistd path
     should also test it */
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and capture whatever Chaco printed */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group vertices by partition: partition 0's vertices first, then 1's, ... */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above: the caller's arrays must be unchanged */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1296 
1297 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1298 {
1299   PetscFunctionBegin;
1300   part->ops->view      = PetscPartitionerView_Chaco;
1301   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1302   part->ops->partition = PetscPartitionerPartition_Chaco;
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 /*MC
1307   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1308 
1309   Level: intermediate
1310 
1311 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1312 M*/
1313 
1314 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1315 {
1316   PetscPartitioner_Chaco *p;
1317   PetscErrorCode          ierr;
1318 
1319   PetscFunctionBegin;
1320   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1321   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1322   part->data = p;
1323 
1324   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1325   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1326   PetscFunctionReturn(0);
1327 }
1328 
1329 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1330 {
1331   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1332   PetscErrorCode             ierr;
1333 
1334   PetscFunctionBegin;
1335   ierr = PetscFree(p);CHKERRQ(ierr);
1336   PetscFunctionReturn(0);
1337 }
1338 
1339 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1340 {
1341   PetscViewerFormat format;
1342   PetscErrorCode    ierr;
1343 
1344   PetscFunctionBegin;
1345   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1346   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1347   PetscFunctionReturn(0);
1348 }
1349 
1350 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1351 {
1352   PetscBool      iascii;
1353   PetscErrorCode ierr;
1354 
1355   PetscFunctionBegin;
1356   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1357   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1358   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1359   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1360   PetscFunctionReturn(0);
1361 }
1362 
1363 #if defined(PETSC_HAVE_PARMETIS)
1364 #include <parmetis.h>
1365 #endif
1366 
1367 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1368 {
1369 #if defined(PETSC_HAVE_PARMETIS)
1370   MPI_Comm       comm;
1371   PetscSection   section;
1372   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1373   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1374   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1375   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1376   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1377   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1378   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1379   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1380   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1381   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1382   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1383   PetscInt       options[5];               /* Options */
1384   /* Outputs */
1385   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1386   PetscInt      *assignment, *points;
1387   PetscMPIInt    rank, p, v, i;
1388   PetscErrorCode ierr;
1389 
1390   PetscFunctionBegin;
1391   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1392   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1393   options[0] = 0; /* Use all defaults */
1394   /* Calculate vertex distribution */
1395   ierr = PetscMalloc5(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1396   vtxdist[0] = 0;
1397   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1398   for (p = 2; p <= nparts; ++p) {
1399     vtxdist[p] += vtxdist[p-1];
1400   }
1401   /* Calculate weights */
1402   for (p = 0; p < nparts; ++p) {
1403     tpwgts[p] = 1.0/nparts;
1404   }
1405   ubvec[0] = 1.05;
1406   /* Weight cells by dofs on cell by default */
1407   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1408   if (section) {
1409     PetscInt cStart, cEnd, dof;
1410 
1411     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1412     for (v = cStart; v < cEnd; ++v) {
1413       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1414       vwgt[v-cStart] = PetscMax(dof, 1);
1415     }
1416   } else {
1417     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1418   }
1419 
1420   if (nparts == 1) {
1421     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1422   } else {
1423     if (vtxdist[1] == vtxdist[nparts]) {
1424       if (!rank) {
1425         PetscStackPush("METIS_PartGraphKway");
1426         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1427         PetscStackPop;
1428         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1429       }
1430     } else {
1431       PetscStackPush("ParMETIS_V3_PartKway");
1432       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1433       PetscStackPop;
1434       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1435     }
1436   }
1437   /* Convert to PetscSection+IS */
1438   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1439   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1440   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1441   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1442   for (p = 0, i = 0; p < nparts; ++p) {
1443     for (v = 0; v < nvtxs; ++v) {
1444       if (assignment[v] == p) points[i++] = v;
1445     }
1446   }
1447   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1448   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1449   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1450   PetscFunctionReturn(0);
1451 #else
1452   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1453 #endif
1454 }
1455 
1456 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1457 {
1458   PetscFunctionBegin;
1459   part->ops->view      = PetscPartitionerView_ParMetis;
1460   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1461   part->ops->partition = PetscPartitionerPartition_ParMetis;
1462   PetscFunctionReturn(0);
1463 }
1464 
1465 /*MC
1466   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1467 
1468   Level: intermediate
1469 
1470 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1471 M*/
1472 
1473 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1474 {
1475   PetscPartitioner_ParMetis *p;
1476   PetscErrorCode          ierr;
1477 
1478   PetscFunctionBegin;
1479   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1480   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1481   part->data = p;
1482 
1483   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1484   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1485   PetscFunctionReturn(0);
1486 }
1487 
1488 /*@
1489   DMPlexGetPartitioner - Get the mesh partitioner
1490 
1491   Not collective
1492 
1493   Input Parameter:
1494 . dm - The DM
1495 
1496   Output Parameter:
1497 . part - The PetscPartitioner
1498 
1499   Level: developer
1500 
1501   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1502 
1503 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1504 @*/
1505 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1506 {
1507   DM_Plex       *mesh = (DM_Plex *) dm->data;
1508   PetscErrorCode ierr;
1509 
1510   PetscFunctionBegin;
1511   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1512   PetscValidPointer(part, 2);
1513   ierr = PetscPartitionerSetTypeFromOptions_Internal(mesh->partitioner);CHKERRQ(ierr);
1514   *part = mesh->partitioner;
1515   PetscFunctionReturn(0);
1516 }
1517 
1518 /*@
1519   DMPlexSetPartitioner - Set the mesh partitioner
1520 
1521   logically collective on dm and part
1522 
1523   Input Parameters:
1524 + dm - The DM
1525 - part - The partitioner
1526 
1527   Level: developer
1528 
1529   Note: Any existing PetscPartitioner will be destroyed.
1530 
1531 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1532 @*/
1533 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1534 {
1535   DM_Plex       *mesh = (DM_Plex *) dm->data;
1536   PetscErrorCode ierr;
1537 
1538   PetscFunctionBegin;
1539   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1540   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1541   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1542   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1543   mesh->partitioner = part;
1544   PetscFunctionReturn(0);
1545 }
1546 
1547 static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1548 {
1549   PetscErrorCode ierr;
1550 
1551   PetscFunctionBegin;
1552   if (up) {
1553     PetscInt parent;
1554 
1555     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1556     if (parent != point) {
1557       PetscInt closureSize, *closure = NULL, i;
1558 
1559       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1560       for (i = 0; i < closureSize; i++) {
1561         PetscInt cpoint = closure[2*i];
1562 
1563         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1564         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1565       }
1566       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1567     }
1568   }
1569   if (down) {
1570     PetscInt numChildren;
1571     const PetscInt *children;
1572 
1573     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1574     if (numChildren) {
1575       PetscInt i;
1576 
1577       for (i = 0; i < numChildren; i++) {
1578         PetscInt cpoint = children[i];
1579 
1580         ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
1581         ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1582       }
1583     }
1584   }
1585   PetscFunctionReturn(0);
1586 }
1587 
1588 /*@
1589   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1590 
1591   Input Parameters:
1592 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1594 
1595   Level: developer
1596 
1597 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1598 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL; /* work array reused by every closure query below */
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    /* NOTE(review): the DMLabelSetValue calls below insert into this same stratum while we
       iterate it; this relies on DMLabelGetStratumIS returning a snapshot -- confirm */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* closure[] stores (point, orientation) pairs, hence the stride of 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also label tree parents and children for non-conforming (tree) meshes */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* Return the shared work array once at the end; the point argument is ignored on restore */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1632 
1633 /*@
1634   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1635 
1636   Input Parameters:
1637 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1639 
1640   Level: developer
1641 
1642 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1643 @*/
1644 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1645 {
1646   IS              rankIS,   pointIS;
1647   const PetscInt *ranks,   *points;
1648   PetscInt        numRanks, numPoints, r, p, a, adjSize;
1649   PetscInt       *adj = NULL;
1650   PetscErrorCode  ierr;
1651 
1652   PetscFunctionBegin;
1653   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1654   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1655   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1656   for (r = 0; r < numRanks; ++r) {
1657     const PetscInt rank = ranks[r];
1658 
1659     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1660     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1661     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1662     for (p = 0; p < numPoints; ++p) {
1663       adjSize = PETSC_DETERMINE;
1664       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
1665       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
1666     }
1667     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1668     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1669   }
1670   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1671   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1672   ierr = PetscFree(adj);CHKERRQ(ierr);
1673   PetscFunctionReturn(0);
1674 }
1675 
1676 /*@
1677   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1678 
1679   Input Parameters:
1680 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1682 
1683   Level: developer
1684 
1685   Note: This is required when generating multi-level overlaps to capture
1686   overlap points from non-neighbouring partitions.
1687 
1688 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1689 @*/
1690 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1691 {
1692   MPI_Comm        comm;
1693   PetscMPIInt     rank;
1694   PetscSF         sfPoint;
1695   DMLabel         lblRoots, lblLeaves;
1696   IS              rankIS, pointIS;
1697   const PetscInt *ranks;
1698   PetscInt        numRanks, r;
1699   PetscErrorCode  ierr;
1700 
1701   PetscFunctionBegin;
1702   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1703   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1704   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1705   /* Pull point contributions from remote leaves into local roots */
1706   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1707   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1708   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1709   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1710   for (r = 0; r < numRanks; ++r) {
1711     const PetscInt remoteRank = ranks[r];
1712     if (remoteRank == rank) continue;
1713     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1714     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1715     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1716   }
1717   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1718   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1719   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1720   /* Push point contributions from roots into remote leaves */
1721   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1722   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1723   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1724   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1725   for (r = 0; r < numRanks; ++r) {
1726     const PetscInt remoteRank = ranks[r];
1727     if (remoteRank == rank) continue;
1728     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1729     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1730     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1731   }
1732   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1733   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1734   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1735   PetscFunctionReturn(0);
1736 }
1737 
1738 /*@
1739   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1740 
1741   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1748 
1749   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1750   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1751 
1752   Level: developer
1753 
1754 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1755 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over target ranks: dof(r) = number of points destined for rank r */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints: for each outgoing point, record its true owner (rank, owner-local index) */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf of the point SF it is remotely owned: send the root (owner) copy.
         NOTE(review): PetscFindInt requires 'local' to be sorted; assumed to hold for the point
         SF leaf array -- confirm */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1825 
1826 /*@
1827   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1828 
1829   Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1835 
1836   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1837 
1838   Level: developer
1839 
1840 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1841 @*/
1842 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1843 {
1844   PetscMPIInt     rank, size;
1845   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1846   PetscSFNode    *remotePoints;
1847   IS              remoteRootIS;
1848   const PetscInt *remoteRoots;
1849   PetscErrorCode ierr;
1850 
1851   PetscFunctionBegin;
1852   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1853   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1854 
1855   for (numRemote = 0, n = 0; n < size; ++n) {
1856     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1857     numRemote += numPoints;
1858   }
1859   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1860   /* Put owned points first */
1861   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1862   if (numPoints > 0) {
1863     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1864     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1865     for (p = 0; p < numPoints; p++) {
1866       remotePoints[idx].index = remoteRoots[p];
1867       remotePoints[idx].rank = rank;
1868       idx++;
1869     }
1870     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1871     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1872   }
1873   /* Now add remote points */
1874   for (n = 0; n < size; ++n) {
1875     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1876     if (numPoints <= 0 || n == rank) continue;
1877     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1878     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1879     for (p = 0; p < numPoints; p++) {
1880       remotePoints[idx].index = remoteRoots[p];
1881       remotePoints[idx].rank = n;
1882       idx++;
1883     }
1884     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1885     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1886   }
1887   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1888   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1889   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1890   PetscFunctionReturn(0);
1891 }
1892