xref: /petsc/src/dm/impls/plex/plexpartition.c (revision e6e75211d226c622f451867f53ce5d558649ff4f)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; assigned when the class is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry of partitioner constructors, populated by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guards registration of the built-in types so it happens only once */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry for the ParMetis partitioner — presumably registered via PetscCitationsRegister(); confirm at use site */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
29 #undef __FUNCT__
30 #define __FUNCT__ "DMPlexCreatePartitionerGraph"
31 /*@C
32   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
33 
34   Input Parameters:
+ dm      - The mesh DM
36 - height  - Height of the strata from which to construct the graph
37 
  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph
42 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   if (nroots > 0) {
79     ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
80     ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
81   }
82   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84     if (nroots > 0) {if (cellNum[p] < 0) continue;}
85     adjSize = PETSC_DETERMINE;
86     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
87     for (a = 0; a < adjSize; ++a) {
88       const PetscInt point = adj[a];
89       if (point != p && pStart <= point && point < pEnd) {
90         PetscInt *PETSC_RESTRICT pBuf;
91         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
92         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
93         *pBuf = point;
94       }
95     }
96     (*numVertices)++;
97   }
98   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
99   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
100   /* Derive CSR graph from section/segbuffer */
101   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
102   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
103   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
104   for (idx = 0, p = pStart; p < pEnd; p++) {
105     if (nroots > 0) {if (cellNum[p] < 0) continue;}
106     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
107   }
108   vOffsets[*numVertices] = size;
109   if (offsets) *offsets = vOffsets;
110   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
111   if (nroots > 0) {
112     ISLocalToGlobalMapping ltogCells;
113     PetscInt n, size, *cells_arr;
114     /* In parallel, apply a global cell numbering to the graph */
115     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
116     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
117     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
119     /* Convert to positive global cell numbers */
120     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
122     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
123     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
124   }
125   if (adjacency) *adjacency = graph;
126   /* Clean up */
127   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
128   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
129   ierr = PetscFree(adj);CHKERRQ(ierr);
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNCT__
134 #define __FUNCT__ "DMPlexCreateNeighborCSR"
135 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136 {
137   const PetscInt maxFaceCases = 30;
138   PetscInt       numFaceCases = 0;
139   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
140   PetscInt      *off, *adj;
141   PetscInt      *neighborCells = NULL;
142   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
143   PetscErrorCode ierr;
144 
145   PetscFunctionBegin;
146   /* For parallel partitioning, I think you have to communicate supports */
147   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
148   cellDim = dim - cellHeight;
149   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
150   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
151   if (cEnd - cStart == 0) {
152     if (numVertices) *numVertices = 0;
153     if (offsets)   *offsets   = NULL;
154     if (adjacency) *adjacency = NULL;
155     PetscFunctionReturn(0);
156   }
157   numCells  = cEnd - cStart;
158   faceDepth = depth - cellHeight;
159   if (dim == depth) {
160     PetscInt f, fStart, fEnd;
161 
162     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
163     /* Count neighboring cells */
164     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
165     for (f = fStart; f < fEnd; ++f) {
166       const PetscInt *support;
167       PetscInt        supportSize;
168       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
169       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
170       if (supportSize == 2) {
171         PetscInt numChildren;
172 
173         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
174         if (!numChildren) {
175           ++off[support[0]-cStart+1];
176           ++off[support[1]-cStart+1];
177         }
178       }
179     }
180     /* Prefix sum */
181     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182     if (adjacency) {
183       PetscInt *tmp;
184 
185       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
186       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
187       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
188       /* Get neighboring cells */
189       for (f = fStart; f < fEnd; ++f) {
190         const PetscInt *support;
191         PetscInt        supportSize;
192         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
193         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
194         if (supportSize == 2) {
195           PetscInt numChildren;
196 
197           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
198           if (!numChildren) {
199             adj[tmp[support[0]-cStart]++] = support[1];
200             adj[tmp[support[1]-cStart]++] = support[0];
201           }
202         }
203       }
204 #if defined(PETSC_USE_DEBUG)
205       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
206 #endif
207       ierr = PetscFree(tmp);CHKERRQ(ierr);
208     }
209     if (numVertices) *numVertices = numCells;
210     if (offsets)   *offsets   = off;
211     if (adjacency) *adjacency = adj;
212     PetscFunctionReturn(0);
213   }
214   /* Setup face recognition */
215   if (faceDepth == 1) {
216     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217 
218     for (c = cStart; c < cEnd; ++c) {
219       PetscInt corners;
220 
221       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
222       if (!cornersSeen[corners]) {
223         PetscInt nFV;
224 
225         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226         cornersSeen[corners] = 1;
227 
228         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
229 
230         numFaceVertices[numFaceCases++] = nFV;
231       }
232     }
233   }
234   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
235   /* Count neighboring cells */
236   for (cell = cStart; cell < cEnd; ++cell) {
237     PetscInt numNeighbors = PETSC_DETERMINE, n;
238 
239     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
240     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241     for (n = 0; n < numNeighbors; ++n) {
242       PetscInt        cellPair[2];
243       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244       PetscInt        meetSize = 0;
245       const PetscInt *meet    = NULL;
246 
247       cellPair[0] = cell; cellPair[1] = neighborCells[n];
248       if (cellPair[0] == cellPair[1]) continue;
249       if (!found) {
250         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
251         if (meetSize) {
252           PetscInt f;
253 
254           for (f = 0; f < numFaceCases; ++f) {
255             if (numFaceVertices[f] == meetSize) {
256               found = PETSC_TRUE;
257               break;
258             }
259           }
260         }
261         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
262       }
263       if (found) ++off[cell-cStart+1];
264     }
265   }
266   /* Prefix sum */
267   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268 
269   if (adjacency) {
270     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
271     /* Get neighboring cells */
272     for (cell = cStart; cell < cEnd; ++cell) {
273       PetscInt numNeighbors = PETSC_DETERMINE, n;
274       PetscInt cellOffset   = 0;
275 
276       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
277       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278       for (n = 0; n < numNeighbors; ++n) {
279         PetscInt        cellPair[2];
280         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281         PetscInt        meetSize = 0;
282         const PetscInt *meet    = NULL;
283 
284         cellPair[0] = cell; cellPair[1] = neighborCells[n];
285         if (cellPair[0] == cellPair[1]) continue;
286         if (!found) {
287           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
288           if (meetSize) {
289             PetscInt f;
290 
291             for (f = 0; f < numFaceCases; ++f) {
292               if (numFaceVertices[f] == meetSize) {
293                 found = PETSC_TRUE;
294                 break;
295               }
296             }
297           }
298           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
299         }
300         if (found) {
301           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302           ++cellOffset;
303         }
304       }
305     }
306   }
307   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
308   if (numVertices) *numVertices = numCells;
309   if (offsets)   *offsets   = off;
310   if (adjacency) *adjacency = adj;
311   PetscFunctionReturn(0);
312 }
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "PetscPartitionerRegister"
316 /*@C
317   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318 
319   Not Collective
320 
321   Input Parameters:
322 + name        - The name of a new user-defined creation routine
323 - create_func - The creation routine itself
324 
325   Notes:
326   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327 
328   Sample usage:
329 .vb
330     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331 .ve
332 
333   Then, your PetscPartitioner type can be chosen with the procedural interface via
334 .vb
335     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336     PetscPartitionerSetType(PetscPartitioner, "my_part");
337 .ve
338    or at runtime via the option
339 .vb
340     -petscpartitioner_type my_part
341 .ve
342 
343   Level: advanced
344 
345 .keywords: PetscPartitioner, register
346 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347 
348 @*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the constructor to the global list of partitioner types under the given name */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "PetscPartitionerSetType"
360 /*@C
361   PetscPartitionerSetType - Builds a particular PetscPartitioner
362 
363   Collective on PetscPartitioner
364 
365   Input Parameters:
366 + part - The PetscPartitioner object
367 - name - The kind of partitioner
368 
369   Options Database Key:
370 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371 
372   Level: intermediate
373 
374 .keywords: PetscPartitioner, set, type
375 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376 @*/
377 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378 {
379   PetscErrorCode (*r)(PetscPartitioner);
380   PetscBool      match;
381   PetscErrorCode ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
385   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
386   if (match) PetscFunctionReturn(0);
387 
388   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
389   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
390   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391 
392   if (part->ops->destroy) {
393     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
394     part->ops->destroy = NULL;
395   }
396   ierr = (*r)(part);CHKERRQ(ierr);
397   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
398   PetscFunctionReturn(0);
399 }
400 
401 #undef __FUNCT__
402 #define __FUNCT__ "PetscPartitionerGetType"
403 /*@C
404   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405 
406   Not Collective
407 
408   Input Parameter:
409 . part - The PetscPartitioner
410 
411   Output Parameter:
412 . name - The PetscPartitioner type name
413 
414   Level: intermediate
415 
416 .keywords: PetscPartitioner, get, type, name
417 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418 @*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure the built-in types are registered so the returned name is meaningful */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Borrowed reference: the caller must not free this string */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
430 
431 #undef __FUNCT__
432 #define __FUNCT__ "PetscPartitionerView"
433 /*@C
434   PetscPartitionerView - Views a PetscPartitioner
435 
436   Collective on PetscPartitioner
437 
  Input Parameters:
439 + part - the PetscPartitioner object to view
440 - v    - the viewer
441 
442   Level: developer
443 
444 .seealso: PetscPartitionerDestroy()
445 @*/
446 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
447 {
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
452   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
453   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
454   PetscFunctionReturn(0);
455 }
456 
457 #undef __FUNCT__
458 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
459 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
460 {
461   const char    *defaultType;
462   char           name[256];
463   PetscBool      flg;
464   PetscErrorCode ierr;
465 
466   PetscFunctionBegin;
467   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
468   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
469   else                                  defaultType = ((PetscObject) part)->type_name;
470   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
471 
472   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
473   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
474   if (flg) {
475     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
476   } else if (!((PetscObject) part)->type_name) {
477     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
478   }
479   ierr = PetscOptionsEnd();CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 
483 #undef __FUNCT__
484 #define __FUNCT__ "PetscPartitionerSetFromOptions"
485 /*@
486   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
487 
488   Collective on PetscPartitioner
489 
490   Input Parameter:
491 . part - the PetscPartitioner object to set options for
492 
493   Level: developer
494 
495 .seealso: PetscPartitionerView()
496 @*/
497 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
498 {
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
503   ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);
504 
505   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
506   if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
507   /* process any options handlers added with PetscObjectAddOptionsHandler() */
508   ierr = PetscObjectProcessOptionsHandlers((PetscObject) part);CHKERRQ(ierr);
509   ierr = PetscOptionsEnd();CHKERRQ(ierr);
510   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
511   PetscFunctionReturn(0);
512 }
513 
514 #undef __FUNCT__
515 #define __FUNCT__ "PetscPartitionerSetUp"
516 /*@C
517   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
518 
519   Collective on PetscPartitioner
520 
521   Input Parameter:
522 . part - the PetscPartitioner object to setup
523 
524   Level: developer
525 
526 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
527 @*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Implementations without a setup hook require no preparation */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
537 
538 #undef __FUNCT__
539 #define __FUNCT__ "PetscPartitionerDestroy"
540 /*@
541   PetscPartitionerDestroy - Destroys a PetscPartitioner object
542 
543   Collective on PetscPartitioner
544 
545   Input Parameter:
546 . part - the PetscPartitioner object to destroy
547 
548   Level: developer
549 
550 .seealso: PetscPartitionerView()
551 @*/
552 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
553 {
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   if (!*part) PetscFunctionReturn(0);
558   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
559 
560   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
561   ((PetscObject) (*part))->refct = 0;
562 
563   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
564   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
565   PetscFunctionReturn(0);
566 }
567 
568 #undef __FUNCT__
569 #define __FUNCT__ "PetscPartitionerCreate"
570 /*@
571   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
572 
573   Collective on MPI_Comm
574 
575   Input Parameter:
576 . comm - The communicator for the PetscPartitioner object
577 
578   Output Parameter:
579 . part - The PetscPartitioner object
580 
581   Level: beginner
582 
583 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
584 @*/
585 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
586 {
587   PetscPartitioner p;
588   PetscErrorCode   ierr;
589 
590   PetscFunctionBegin;
591   PetscValidPointer(part, 2);
592   *part = NULL;
593   ierr = PetscFVInitializePackage();CHKERRQ(ierr);
594 
595   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
596 
597   *part = p;
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "PetscPartitionerPartition"
603 /*@
604   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
605 
606   Collective on DM
607 
608   Input Parameters:
609 + part    - The PetscPartitioner
610 - dm      - The mesh DM
611 
612   Output Parameters:
613 + partSection     - The PetscSection giving the division of points by partition
614 - partition       - The list of points by partition
615 
616   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
617 
618   Level: developer
619 
620 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
621 @*/
622 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
623 {
624   PetscMPIInt    size;
625   PetscErrorCode ierr;
626 
627   PetscFunctionBegin;
628   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
629   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
630   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
631   PetscValidPointer(partition, 5);
632   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
633   if (size == 1) {
634     PetscInt *points;
635     PetscInt  cStart, cEnd, c;
636 
637     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
638     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
639     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
640     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
641     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
642     for (c = cStart; c < cEnd; ++c) points[c] = c;
643     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
644     PetscFunctionReturn(0);
645   }
646   if (part->height == 0) {
647     PetscInt  numVertices;
648     PetscInt *start     = NULL;
649     PetscInt *adjacency = NULL;
650 
651     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
652     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
653     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
654     ierr = PetscFree(start);CHKERRQ(ierr);
655     ierr = PetscFree(adjacency);CHKERRQ(ierr);
656   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
657   PetscFunctionReturn(0);
658 }
659 
660 #undef __FUNCT__
661 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
/* Release the Shell partitioner's stored section, partition IS, and private data */
PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscFree(p);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
/* Print a one-line description of the Shell partitioner to an ASCII viewer */
PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* NOTE(review): format is fetched but never used — presumably reserved for format-specific output; confirm */
  ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 
687 #undef __FUNCT__
688 #define __FUNCT__ "PetscPartitionerView_Shell"
689 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
690 {
691   PetscBool      iascii;
692   PetscErrorCode ierr;
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
696   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
697   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
698   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
699   PetscFunctionReturn(0);
700 }
701 
702 #undef __FUNCT__
703 #define __FUNCT__ "PetscPartitionerPartition_Shell"
704 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
705 {
706   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
707   PetscInt                np;
708   PetscErrorCode          ierr;
709 
710   PetscFunctionBegin;
711   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
712   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
713   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
714   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
715   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
716   *partition = p->partition;
717   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
721 #undef __FUNCT__
722 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
723 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
724 {
725   PetscFunctionBegin;
726   part->ops->view      = PetscPartitionerView_Shell;
727   part->ops->destroy   = PetscPartitionerDestroy_Shell;
728   part->ops->partition = PetscPartitionerPartition_Shell;
729   PetscFunctionReturn(0);
730 }
731 
732 /*MC
733   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
734 
735   Level: intermediate
736 
737 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
738 M*/
739 
740 #undef __FUNCT__
741 #define __FUNCT__ "PetscPartitionerCreate_Shell"
742 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
743 {
744   PetscPartitioner_Shell *p;
745   PetscErrorCode          ierr;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
749   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
750   part->data = p;
751 
752   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 #undef __FUNCT__
757 #define __FUNCT__ "PetscPartitionerShellSetPartition"
758 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
760 
761   Collective on PART
762 
763   Input Parameters:
764 + part     - The PetscPartitioner
765 . numProcs - The number of partitions
766 . sizes    - array of size numProcs (or NULL) providing the number of points in each partition
767 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to
768 
769   Level: developer
770 
771   Notes:
772 
773     It is safe to free the sizes and points arrays after use in this routine.
774 
775 .seealso DMPlexDistribute(), PetscPartitionerCreate()
776 @*/
777 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
778 {
779   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
780   PetscInt                proc, numPoints;
781   PetscErrorCode          ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
785   if (sizes) {PetscValidPointer(sizes, 3);}
786   if (sizes) {PetscValidPointer(points, 4);}
787   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
788   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
789   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
790   ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
791   if (sizes) {
792     for (proc = 0; proc < numProcs; ++proc) {
793       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
794     }
795   }
796   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
797   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
798   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
804 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
805 {
806   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
807   PetscErrorCode          ierr;
808 
809   PetscFunctionBegin;
810   ierr = PetscFree(p);CHKERRQ(ierr);
811   PetscFunctionReturn(0);
812 }
813 
814 #undef __FUNCT__
815 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
816 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
817 {
818   PetscViewerFormat format;
819   PetscErrorCode    ierr;
820 
821   PetscFunctionBegin;
822   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
823   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
824   PetscFunctionReturn(0);
825 }
826 
827 #undef __FUNCT__
828 #define __FUNCT__ "PetscPartitionerView_Simple"
829 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
830 {
831   PetscBool      iascii;
832   PetscErrorCode ierr;
833 
834   PetscFunctionBegin;
835   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
836   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
837   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
838   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "PetscPartitionerPartition_Simple"
844 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
845 {
846   PetscInt       np;
847   PetscErrorCode ierr;
848 
849   PetscFunctionBegin;
850   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
851   for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
852   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
853   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
854   PetscFunctionReturn(0);
855 }
856 
857 #undef __FUNCT__
858 #define __FUNCT__ "PetscPartitionerInitialize_Simple"
859 PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
860 {
861   PetscFunctionBegin;
862   part->ops->view      = PetscPartitionerView_Simple;
863   part->ops->destroy   = PetscPartitionerDestroy_Simple;
864   part->ops->partition = PetscPartitionerPartition_Simple;
865   PetscFunctionReturn(0);
866 }
867 
/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns contiguous blocks of vertices to the partitions in order

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/
875 
876 #undef __FUNCT__
877 #define __FUNCT__ "PetscPartitionerCreate_Simple"
878 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
879 {
880   PetscPartitioner_Simple *p;
881   PetscErrorCode           ierr;
882 
883   PetscFunctionBegin;
884   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
885   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
886   part->data = p;
887 
888   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
894 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
895 {
896   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
897   PetscErrorCode          ierr;
898 
899   PetscFunctionBegin;
900   ierr = PetscFree(p);CHKERRQ(ierr);
901   PetscFunctionReturn(0);
902 }
903 
904 #undef __FUNCT__
905 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
906 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
907 {
908   PetscViewerFormat format;
909   PetscErrorCode    ierr;
910 
911   PetscFunctionBegin;
912   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
913   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
914   PetscFunctionReturn(0);
915 }
916 
917 #undef __FUNCT__
918 #define __FUNCT__ "PetscPartitionerView_Chaco"
919 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
920 {
921   PetscBool      iascii;
922   PetscErrorCode ierr;
923 
924   PetscFunctionBegin;
925   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
926   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
927   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
928   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
929   PetscFunctionReturn(0);
930 }
931 
#if defined(PETSC_HAVE_CHACO)
#if defined(PETSC_HAVE_UNISTD_H)
/* unistd.h supplies pipe/dup/dup2/close/read, used below to capture Chaco's stdout */
#include <unistd.h>
#endif
/* Chaco does not have an include file */
/* Hand-written prototype for Chaco's main entry point, called in
   PetscPartitionerPartition_Chaco() below */
PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                       float *ewgts, float *x, float *y, float *z, char *outassignname,
                       char *outfilename, short *assignment, int architecture, int ndims_tot,
                       int mesh_dims[3], double *goal, int global_method, int local_method,
                       int rqi_flag, int vmax, int ndims, double eigtol, long seed);

/* Global flag inside the Chaco library; set to 0 before calling interface() so
   Chaco does not free the caller-owned graph arrays */
extern int FREE_GRAPH;
#endif
945 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/* Partition the local CSR graph (start, adjacency) into nparts pieces using the
   Chaco library.  On return, partSection holds the number of vertices assigned
   to each partition and partition lists the vertex numbers grouped by partition. */
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  /* Empty local graph: produce an empty section and IS without calling Chaco */
  if (!numVertices) {
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency to 1-based numbering in place (presumably what Chaco
     expects); the shift is undone at the end of this function */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* Target a 1-D mesh of nparts processors */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    /* Save stdout, then point fd 1 at the write end of the pipe */
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Drain Chaco's output from the pipe and restore stdout */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    /* NOTE(review): assignment is not freed on this error path, and the 1-based
       shift of adjacency is not undone -- confirm whether that is acceptable */
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Group vertex numbers by partition (O(nparts*nvtxs) scan) */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  /* Every vertex must have been assigned exactly once */
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1051 
1052 #undef __FUNCT__
1053 #define __FUNCT__ "PetscPartitionerInitialize_Chaco"
1054 PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1055 {
1056   PetscFunctionBegin;
1057   part->ops->view      = PetscPartitionerView_Chaco;
1058   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1059   part->ops->partition = PetscPartitionerPartition_Chaco;
1060   PetscFunctionReturn(0);
1061 }
1062 
1063 /*MC
1064   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1065 
1066   Level: intermediate
1067 
1068 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1069 M*/
1070 
1071 #undef __FUNCT__
1072 #define __FUNCT__ "PetscPartitionerCreate_Chaco"
1073 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1074 {
1075   PetscPartitioner_Chaco *p;
1076   PetscErrorCode          ierr;
1077 
1078   PetscFunctionBegin;
1079   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1080   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1081   part->data = p;
1082 
1083   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1084   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 #undef __FUNCT__
1089 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1090 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1091 {
1092   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1093   PetscErrorCode             ierr;
1094 
1095   PetscFunctionBegin;
1096   ierr = PetscFree(p);CHKERRQ(ierr);
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 #undef __FUNCT__
1101 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1102 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1103 {
1104   PetscViewerFormat format;
1105   PetscErrorCode    ierr;
1106 
1107   PetscFunctionBegin;
1108   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1109   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 #undef __FUNCT__
1114 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1115 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1116 {
1117   PetscBool      iascii;
1118   PetscErrorCode ierr;
1119 
1120   PetscFunctionBegin;
1121   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1122   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1123   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1124   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1125   PetscFunctionReturn(0);
1126 }
1127 
1128 #if defined(PETSC_HAVE_PARMETIS)
1129 #include <parmetis.h>
1130 #endif
1131 
1132 #undef __FUNCT__
1133 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1134 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1135 {
1136 #if defined(PETSC_HAVE_PARMETIS)
1137   MPI_Comm       comm;
1138   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1139   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1140   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1141   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1142   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1143   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1144   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1145   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1146   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1147   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1148   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1149   PetscInt       options[5];               /* Options */
1150   /* Outputs */
1151   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1152   PetscInt      *assignment, *points;
1153   PetscMPIInt    rank, p, v, i;
1154   PetscErrorCode ierr;
1155 
1156   PetscFunctionBegin;
1157   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1158   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1159   options[0] = 0; /* Use all defaults */
1160   /* Calculate vertex distribution */
1161   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1162   vtxdist[0] = 0;
1163   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1164   for (p = 2; p <= nparts; ++p) {
1165     vtxdist[p] += vtxdist[p-1];
1166   }
1167   /* Calculate weights */
1168   for (p = 0; p < nparts; ++p) {
1169     tpwgts[p] = 1.0/nparts;
1170   }
1171   ubvec[0] = 1.05;
1172 
1173   if (nparts == 1) {
1174     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1175   } else {
1176     if (vtxdist[1] == vtxdist[nparts]) {
1177       if (!rank) {
1178         PetscStackPush("METIS_PartGraphKway");
1179         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1180         PetscStackPop;
1181         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1182       }
1183     } else {
1184       PetscStackPush("ParMETIS_V3_PartKway");
1185       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1186       PetscStackPop;
1187       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1188     }
1189   }
1190   /* Convert to PetscSection+IS */
1191   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1192   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1193   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1194   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1195   for (p = 0, i = 0; p < nparts; ++p) {
1196     for (v = 0; v < nvtxs; ++v) {
1197       if (assignment[v] == p) points[i++] = v;
1198     }
1199   }
1200   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1201   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1202   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1203   PetscFunctionReturn(0);
1204 #else
1205   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1206 #endif
1207 }
1208 
1209 #undef __FUNCT__
1210 #define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
1211 PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1212 {
1213   PetscFunctionBegin;
1214   part->ops->view      = PetscPartitionerView_ParMetis;
1215   part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
1216   part->ops->partition = PetscPartitionerPartition_ParMetis;
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 /*MC
1221   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1222 
1223   Level: intermediate
1224 
1225 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1226 M*/
1227 
1228 #undef __FUNCT__
1229 #define __FUNCT__ "PetscPartitionerCreate_ParMetis"
1230 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1231 {
1232   PetscPartitioner_ParMetis *p;
1233   PetscErrorCode          ierr;
1234 
1235   PetscFunctionBegin;
1236   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1237   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1238   part->data = p;
1239 
1240   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1241   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1242   PetscFunctionReturn(0);
1243 }
1244 
1245 #undef __FUNCT__
1246 #define __FUNCT__ "DMPlexGetPartitioner"
1247 /*@
1248   DMPlexGetPartitioner - Get the mesh partitioner
1249 
1250   Not collective
1251 
1252   Input Parameter:
1253 . dm - The DM
1254 
1255   Output Parameter:
1256 . part - The PetscPartitioner
1257 
1258   Level: developer
1259 
1260   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1261 
1262 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1263 @*/
1264 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1265 {
1266   DM_Plex *mesh = (DM_Plex *) dm->data;
1267 
1268   PetscFunctionBegin;
1269   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1270   PetscValidPointer(part, 2);
1271   *part = mesh->partitioner;
1272   PetscFunctionReturn(0);
1273 }
1274 
1275 #undef __FUNCT__
1276 #define __FUNCT__ "DMPlexSetPartitioner"
1277 /*@
1278   DMPlexSetPartitioner - Set the mesh partitioner
1279 
1280   logically collective on dm and part
1281 
1282   Input Parameters:
1283 + dm - The DM
1284 - part - The partitioner
1285 
1286   Level: developer
1287 
1288   Note: Any existing PetscPartitioner will be destroyed.
1289 
1290 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1291 @*/
1292 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1293 {
1294   DM_Plex       *mesh = (DM_Plex *) dm->data;
1295   PetscErrorCode ierr;
1296 
1297   PetscFunctionBegin;
1298   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1299   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1300   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1301   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1302   mesh->partitioner = part;
1303   PetscFunctionReturn(0);
1304 }
1305 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/* Recursively add the transitive closure of the tree parent of 'point' to the
   label under the given rank, so that constrained points drag their parents
   along into the partition. */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point)
{
  PetscInt       parent, closureSize, *closure = NULL, i, pStart, pEnd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
  /* A point that is its own parent has no tree constraint: nothing to do */
  if (parent == point) PetscFunctionReturn(0);
  /* NOTE(review): pStart/pEnd are fetched but never used below */
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  for (i = 0; i < closureSize; i++) {
    /* Closure entries are (point, orientation) pairs; take the point */
    PetscInt cpoint = closure[2*i];

    ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
    /* Recurse: a closure point may itself have a tree parent */
    ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "DMPlexPartitionLabelClosure"
1330 /*@
1331   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1332 
1333   Input Parameters:
1334 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1336 
1337   Level: developer
1338 
1339 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1340 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    /* For each point already labeled with this rank, label its closure too */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      /* Closure entries are (point, orientation) pairs; step by 2 */
      for (c = 0; c < closureSize*2; c += 2) {
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also pull in tree parents of each closure point */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c]);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  /* The closure work array is reused across iterations; return it once here */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1374 
1375 #undef __FUNCT__
1376 #define __FUNCT__ "DMPlexPartitionLabelAdjacency"
1377 /*@
1378   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1379 
1380   Input Parameters:
1381 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1383 
1384   Level: developer
1385 
1386 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1387 @*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    /* For each point already labeled with this rank, label its adjacent points */
    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE presumably lets DMPlexGetAdjacency size (and allocate)
         the adj buffer, which is then reused across iterations -- confirm */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  /* Free the adjacency work array allocated by DMPlexGetAdjacency */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1419 
1420 #undef __FUNCT__
1421 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1422 /*@
1423   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1424 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1432 
1433   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1434   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1435 
1436   Level: developer
1437 
1438 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1439 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection counts, per target rank, the points we will send to it */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Fill rootPoints with (owner rank, owner-local index) for every labeled point:
     points that are leaves in the point SF are translated to their remote owner,
     points we own keep our own rank and local index */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* Binary search for the point among the SF leaves (local may be NULL) */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Record every received (point, rank) pair in the output label */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  /* leafPoints was allocated by DMPlexDistributeData */
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1509 
1510 #undef __FUNCT__
1511 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1512 /*@
1513   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1514 
1515   Input Parameters:
1516 + dm    - The DM
. label - DMLabel assigning ranks to remote roots
1518 
1519   Output Parameter:
1520 - sf    - The star forest communication context encapsulating the defined mapping
1521 
1522   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1523 
1524   Level: developer
1525 
1526 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1527 @*/
1528 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1529 {
1530   PetscMPIInt     rank, numProcs;
1531   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1532   PetscSFNode    *remotePoints;
1533   IS              remoteRootIS;
1534   const PetscInt *remoteRoots;
1535   PetscErrorCode ierr;
1536 
1537   PetscFunctionBegin;
1538   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1539   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1540 
1541   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1542     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1543     numRemote += numPoints;
1544   }
1545   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1546   /* Put owned points first */
1547   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1548   if (numPoints > 0) {
1549     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1550     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1551     for (p = 0; p < numPoints; p++) {
1552       remotePoints[idx].index = remoteRoots[p];
1553       remotePoints[idx].rank = rank;
1554       idx++;
1555     }
1556     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1557     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1558   }
1559   /* Now add remote points */
1560   for (n = 0; n < numProcs; ++n) {
1561     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1562     if (numPoints <= 0 || n == rank) continue;
1563     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1564     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1565     for (p = 0; p < numPoints; p++) {
1566       remotePoints[idx].index = remoteRoots[p];
1567       remotePoints[idx].rank = n;
1568       idx++;
1569     }
1570     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1571     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1572   }
1573   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1574   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1575   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1576   PetscFunctionReturn(0);
1577 }
1578