xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 1ebf93c6b7d760d592de6ebe6cdc0debaa3caf75)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
/* Class id for PetscPartitioner objects; assigned when the containing package is registered */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of available partitioner implementations (populated via PetscPartitionerRegister()) */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
7 
8 PetscBool ChacoPartitionercite = PETSC_FALSE;
9 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
11                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
12                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13                                "  isbn      = {0-89791-816-9},\n"
14                                "  pages     = {28},\n"
15                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
16                                "  publisher = {ACM Press},\n"
17                                "  address   = {New York},\n"
18                                "  year      = {1995}\n}\n";
19 
/* Citation flag and BibTeX entry for the ParMetis partitioner (registered with PetscCitationsRegister()) */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
28 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePartitionerGraph"
/*@C
  DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

  Input Parameters:
+ dm      - The mesh DM
- height  - Height of the strata from which to construct the graph

  Output Parameters:
+ numVertices - Number of vertices in the graph
. offsets     - Point offsets in the graph
- adjacency   - Point connectivity in the graph

  Notes:
  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh. Note that the adjacency settings are overridden with the FVM-style
  (cone, no closure) combination while the graph is built, and restored afterwards.

  Level: developer

.seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;
  PetscSegBuffer adjBuffer;
  PetscSF        sfPoint;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  /* nroots > 0 means the point SF is nontrivial, i.e. points are shared across processes */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph; save the user settings so they can be restored below */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexGetCellNumbering(dm, &cellNumbering);CHKERRQ(ierr);
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    /* Skip non-owned cells in parallel (ParMetis expects no overlap); a negative global number marks a remote cell */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      /* Only record neighbors in the same stratum, and never the point itself */
      const PetscInt point = adj[a];
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = point;
      }
    }
    (*numVertices)++;
  }
  /* Restore the adjacency settings saved above */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  {
    ISLocalToGlobalMapping ltogCells;
    PetscInt n, size, *cells_arr; /* NOTE(review): this `size` shadows the outer `size` — intentional? */
    /* In parallel, apply a global cell numbering to the graph */
    ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    /* Convert to positive global cell numbers (remote cells are encoded as -(global+1));
       NOTE(review): this mutates the mapping's index array in place through a const-cast — confirm this is safe */
    for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
  }
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "DMPlexCreateNeighborCSR"
133 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
134 {
135   const PetscInt maxFaceCases = 30;
136   PetscInt       numFaceCases = 0;
137   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
138   PetscInt      *off, *adj;
139   PetscInt      *neighborCells = NULL;
140   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
141   PetscErrorCode ierr;
142 
143   PetscFunctionBegin;
144   /* For parallel partitioning, I think you have to communicate supports */
145   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
146   cellDim = dim - cellHeight;
147   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
148   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
149   if (cEnd - cStart == 0) {
150     if (numVertices) *numVertices = 0;
151     if (offsets)   *offsets   = NULL;
152     if (adjacency) *adjacency = NULL;
153     PetscFunctionReturn(0);
154   }
155   numCells  = cEnd - cStart;
156   faceDepth = depth - cellHeight;
157   if (dim == depth) {
158     PetscInt f, fStart, fEnd;
159 
160     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
161     /* Count neighboring cells */
162     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
163     for (f = fStart; f < fEnd; ++f) {
164       const PetscInt *support;
165       PetscInt        supportSize;
166       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
167       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
168       if (supportSize == 2) {
169         PetscInt numChildren;
170 
171         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
172         if (!numChildren) {
173           ++off[support[0]-cStart+1];
174           ++off[support[1]-cStart+1];
175         }
176       }
177     }
178     /* Prefix sum */
179     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
180     if (adjacency) {
181       PetscInt *tmp;
182 
183       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
184       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
185       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
186       /* Get neighboring cells */
187       for (f = fStart; f < fEnd; ++f) {
188         const PetscInt *support;
189         PetscInt        supportSize;
190         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
191         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
192         if (supportSize == 2) {
193           PetscInt numChildren;
194 
195           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
196           if (!numChildren) {
197             adj[tmp[support[0]-cStart]++] = support[1];
198             adj[tmp[support[1]-cStart]++] = support[0];
199           }
200         }
201       }
202 #if defined(PETSC_USE_DEBUG)
203       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
204 #endif
205       ierr = PetscFree(tmp);CHKERRQ(ierr);
206     }
207     if (numVertices) *numVertices = numCells;
208     if (offsets)   *offsets   = off;
209     if (adjacency) *adjacency = adj;
210     PetscFunctionReturn(0);
211   }
212   /* Setup face recognition */
213   if (faceDepth == 1) {
214     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
215 
216     for (c = cStart; c < cEnd; ++c) {
217       PetscInt corners;
218 
219       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
220       if (!cornersSeen[corners]) {
221         PetscInt nFV;
222 
223         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
224         cornersSeen[corners] = 1;
225 
226         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
227 
228         numFaceVertices[numFaceCases++] = nFV;
229       }
230     }
231   }
232   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
233   /* Count neighboring cells */
234   for (cell = cStart; cell < cEnd; ++cell) {
235     PetscInt numNeighbors = PETSC_DETERMINE, n;
236 
237     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
238     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
239     for (n = 0; n < numNeighbors; ++n) {
240       PetscInt        cellPair[2];
241       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
242       PetscInt        meetSize = 0;
243       const PetscInt *meet    = NULL;
244 
245       cellPair[0] = cell; cellPair[1] = neighborCells[n];
246       if (cellPair[0] == cellPair[1]) continue;
247       if (!found) {
248         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
249         if (meetSize) {
250           PetscInt f;
251 
252           for (f = 0; f < numFaceCases; ++f) {
253             if (numFaceVertices[f] == meetSize) {
254               found = PETSC_TRUE;
255               break;
256             }
257           }
258         }
259         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
260       }
261       if (found) ++off[cell-cStart+1];
262     }
263   }
264   /* Prefix sum */
265   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
266 
267   if (adjacency) {
268     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
269     /* Get neighboring cells */
270     for (cell = cStart; cell < cEnd; ++cell) {
271       PetscInt numNeighbors = PETSC_DETERMINE, n;
272       PetscInt cellOffset   = 0;
273 
274       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
275       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
276       for (n = 0; n < numNeighbors; ++n) {
277         PetscInt        cellPair[2];
278         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
279         PetscInt        meetSize = 0;
280         const PetscInt *meet    = NULL;
281 
282         cellPair[0] = cell; cellPair[1] = neighborCells[n];
283         if (cellPair[0] == cellPair[1]) continue;
284         if (!found) {
285           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
286           if (meetSize) {
287             PetscInt f;
288 
289             for (f = 0; f < numFaceCases; ++f) {
290               if (numFaceVertices[f] == meetSize) {
291                 found = PETSC_TRUE;
292                 break;
293               }
294             }
295           }
296           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
297         }
298         if (found) {
299           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
300           ++cellOffset;
301         }
302       }
303     }
304   }
305   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
306   if (numVertices) *numVertices = numCells;
307   if (offsets)   *offsets   = off;
308   if (adjacency) *adjacency = adj;
309   PetscFunctionReturn(0);
310 }
311 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerRegister"
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ sname       - The name of a new user-defined creation routine
- function    - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the constructor in the global registry consulted by PetscPartitionerSetType() */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
355 
356 #undef __FUNCT__
357 #define __FUNCT__ "PetscPartitionerSetType"
358 /*@C
359   PetscPartitionerSetType - Builds a particular PetscPartitioner
360 
361   Collective on PetscPartitioner
362 
363   Input Parameters:
364 + part - The PetscPartitioner object
365 - name - The kind of partitioner
366 
367   Options Database Key:
368 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
369 
370   Level: intermediate
371 
372 .keywords: PetscPartitioner, set, type
373 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
374 @*/
375 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
376 {
377   PetscErrorCode (*r)(PetscPartitioner);
378   PetscBool      match;
379   PetscErrorCode ierr;
380 
381   PetscFunctionBegin;
382   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
383   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
384   if (match) PetscFunctionReturn(0);
385 
386   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
387   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
388   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
389 
390   if (part->ops->destroy) {
391     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
392     part->ops->destroy = NULL;
393   }
394   ierr = (*r)(part);CHKERRQ(ierr);
395   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
396   PetscFunctionReturn(0);
397 }
398 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerGetType"
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name (NULL if no type has been set yet)

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Return the cached type name; the caller does not own this string */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
428 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerView"
/*@C
  PetscPartitionerView - Views a PetscPartitioner

  Collective on PetscPartitioner

  Input Parameters:
+ part - the PetscPartitioner object to view
- v    - the viewer (pass NULL to view on stdout of the object's communicator)

  Level: developer

.seealso: PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Default to the standard ASCII viewer when none is supplied */
  if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
  /* Delegate to the implementation's view routine, if any */
  if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
454 
455 #undef __FUNCT__
456 #define __FUNCT__ "PetscPartitionerSetTypeFromOptions_Internal"
457 PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
458 {
459   const char    *defaultType;
460   char           name[256];
461   PetscBool      flg;
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
466   if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
467   else                                  defaultType = ((PetscObject) part)->type_name;
468   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
469 
470   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
471   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);CHKERRQ(ierr);
472   if (flg) {
473     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
474   } else if (!((PetscObject) part)->type_name) {
475     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
476   }
477   ierr = PetscOptionsEnd();CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetFromOptions"
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Select the type first so the implementation's option handlers below exist */
  ierr = PetscPartitionerSetTypeFromOptions_Internal(part);CHKERRQ(ierr);

  /* PetscObjectOptionsBegin declares PetscOptionsObject used below */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  if (part->ops->setfromoptions) {ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);}
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
511 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerSetUp"
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation's setup routine; a no-op when the type provides none */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
535 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerDestroy"
/*@
  PetscPartitionerDestroy - Destroys a PetscPartitioner object

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to destroy

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);

  /* Only the last reference actually destroys the object */
  if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
  ((PetscObject) (*part))->refct = 0;

  /* Implementation-specific teardown before the header is destroyed */
  if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
565 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate"
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  /* NOTE(review): presumably this registers PETSCPARTITIONER_CLASSID as part of the FV package — confirm */
  ierr = PetscFVInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);

  *part = p;
  PetscFunctionReturn(0);
}
598 
599 #undef __FUNCT__
600 #define __FUNCT__ "PetscPartitionerPartition"
601 /*@
602   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
603 
604   Collective on DM
605 
606   Input Parameters:
607 + part    - The PetscPartitioner
608 - dm      - The mesh DM
609 
610   Output Parameters:
611 + partSection     - The PetscSection giving the division of points by partition
612 - partition       - The list of points by partition
613 
614   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
615 
616   Level: developer
617 
618 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
619 @*/
620 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
621 {
622   PetscMPIInt    size;
623   PetscErrorCode ierr;
624 
625   PetscFunctionBegin;
626   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
627   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
628   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
629   PetscValidPointer(partition, 5);
630   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
631   if (size == 1) {
632     PetscInt *points;
633     PetscInt  cStart, cEnd, c;
634 
635     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
636     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
637     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
638     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
639     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
640     for (c = cStart; c < cEnd; ++c) points[c] = c;
641     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
642     PetscFunctionReturn(0);
643   }
644   if (part->height == 0) {
645     PetscInt  numVertices;
646     PetscInt *start     = NULL;
647     PetscInt *adjacency = NULL;
648 
649     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);CHKERRQ(ierr);
650     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
651     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
652     ierr = PetscFree(start);CHKERRQ(ierr);
653     ierr = PetscFree(adjacency);CHKERRQ(ierr);
654   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
655   PetscFunctionReturn(0);
656 }
657 
658 #undef __FUNCT__
659 #define __FUNCT__ "PetscPartitionerDestroy_Shell"
660 PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
661 {
662   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
663   PetscErrorCode          ierr;
664 
665   PetscFunctionBegin;
666   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
667   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
668   ierr = PetscFree(p);CHKERRQ(ierr);
669   PetscFunctionReturn(0);
670 }
671 
672 #undef __FUNCT__
673 #define __FUNCT__ "PetscPartitionerView_Shell_Ascii"
674 PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
675 {
676   PetscViewerFormat format;
677   PetscErrorCode    ierr;
678 
679   PetscFunctionBegin;
680   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
681   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
682   PetscFunctionReturn(0);
683 }
684 
685 #undef __FUNCT__
686 #define __FUNCT__ "PetscPartitionerView_Shell"
687 PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
688 {
689   PetscBool      iascii;
690   PetscErrorCode ierr;
691 
692   PetscFunctionBegin;
693   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
694   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
695   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
696   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
697   PetscFunctionReturn(0);
698 }
699 
700 #undef __FUNCT__
701 #define __FUNCT__ "PetscPartitionerPartition_Shell"
702 PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
703 {
704   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
705   PetscInt                np;
706   PetscErrorCode          ierr;
707 
708   PetscFunctionBegin;
709   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
710   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
711   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
712   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
713   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
714   *partition = p->partition;
715   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
716   PetscFunctionReturn(0);
717 }
718 
719 #undef __FUNCT__
720 #define __FUNCT__ "PetscPartitionerInitialize_Shell"
721 PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
722 {
723   PetscFunctionBegin;
724   part->ops->view      = PetscPartitionerView_Shell;
725   part->ops->destroy   = PetscPartitionerDestroy_Shell;
726   part->ops->partition = PetscPartitionerPartition_Shell;
727   PetscFunctionReturn(0);
728 }
729 
/*MC
  PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner implementation that returns a user-specified partition, set with PetscPartitionerShellSetPartition()

  Level: intermediate

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType(), PetscPartitionerShellSetPartition()
M*/
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "PetscPartitionerCreate_Shell"
740 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
741 {
742   PetscPartitioner_Shell *p;
743   PetscErrorCode          ierr;
744 
745   PetscFunctionBegin;
746   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
747   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
748   part->data = p;
749 
750   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerShellSetPartition"
/*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

  Collective on PART

  Input Parameters:
+ part     - The PetscPartitioner
. numProcs - The number of partitions
. sizes    - array of size numProcs (or NULL) providing the number of points in each partition
- points   - array of size sum(sizes) (may be NULL iff sizes is NULL) providing the partition each point belongs to

  Level: developer

  Notes:

    It is safe to free the sizes and points arrays after use in this routine.

.seealso: DMPlexDistribute(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
{
  PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
  PetscInt                proc, numPoints;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* points is required whenever sizes is given (see the parameter docs) */
  if (sizes) {PetscValidPointer(sizes, 3);}
  if (sizes) {PetscValidPointer(points, 4);}
  /* Discard any previously configured partition */
  ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
  ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(p->section, 0, numProcs);CHKERRQ(ierr);
  if (sizes) {
    for (proc = 0; proc < numProcs; ++proc) {
      ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
  /* Copy the point list, sized by the section's total storage (0 when sizes is NULL) */
  ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
799 
800 #undef __FUNCT__
801 #define __FUNCT__ "PetscPartitionerDestroy_Simple"
802 PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
803 {
804   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
805   PetscErrorCode          ierr;
806 
807   PetscFunctionBegin;
808   ierr = PetscFree(p);CHKERRQ(ierr);
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "PetscPartitionerView_Simple_Ascii"
814 PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
815 {
816   PetscViewerFormat format;
817   PetscErrorCode    ierr;
818 
819   PetscFunctionBegin;
820   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
821   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
822   PetscFunctionReturn(0);
823 }
824 
825 #undef __FUNCT__
826 #define __FUNCT__ "PetscPartitionerView_Simple"
827 PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
828 {
829   PetscBool      iascii;
830   PetscErrorCode ierr;
831 
832   PetscFunctionBegin;
833   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
834   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
835   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
836   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "PetscPartitionerPartition_Simple"
842 PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
843 {
844   MPI_Comm       comm;
845   PetscInt       np;
846   PetscMPIInt    size;
847   PetscErrorCode ierr;
848 
849   PetscFunctionBegin;
850   comm = PetscObjectComm((PetscObject)dm);
851   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
852   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
853   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
854   if (size == 1) {
855     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
856   }
857   else {
858     PetscMPIInt rank;
859     PetscInt nvGlobal, *offsets, myFirst, myLast;
860 
861     ierr = PetscMalloc1(size+1,&offsets);
862     offsets[0] = 0;
863     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
864     for (np = 2; np <= size; np++) {
865       offsets[np] += offsets[np-1];
866     }
867     nvGlobal = offsets[size];
868     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
869     myFirst = offsets[rank];
870     myLast  = offsets[rank + 1] - 1;
871     ierr = PetscFree(offsets);CHKERRQ(ierr);
872     if (numVertices) {
873       PetscInt firstPart = 0, firstLargePart = 0;
874       PetscInt lastPart = 0, lastLargePart = 0;
875       PetscInt rem = nvGlobal % nparts;
876       PetscInt pSmall = nvGlobal/nparts;
877       PetscInt pBig = nvGlobal/nparts + 1;
878 
879 
880       if (rem) {
881         firstLargePart = myFirst / pBig;
882         lastLargePart  = myLast  / pBig;
883 
884         if (firstLargePart < rem) {
885           firstPart = firstLargePart;
886         }
887         else {
888           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
889         }
890         if (lastLargePart < rem) {
891           lastPart = lastLargePart;
892         }
893         else {
894           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
895         }
896       }
897       else {
898         firstPart = myFirst / (nvGlobal/nparts);
899         lastPart  = myLast  / (nvGlobal/nparts);
900       }
901 
902       for (np = firstPart; np <= lastPart; np++) {
903         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
904         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
905 
906         PartStart = PetscMax(PartStart,myFirst);
907         PartEnd   = PetscMin(PartEnd,myLast+1);
908         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
909       }
910     }
911   }
912   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
913   PetscFunctionReturn(0);
914 }
915 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Simple"
/* Install the function table for the simple partitioner */
PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Simple;
  part->ops->destroy   = PetscPartitionerDestroy_Simple;
  part->ops->partition = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}
926 
927 /*MC
928   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
929 
930   Level: intermediate
931 
932 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
933 M*/
934 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Simple"
/* Constructor for PETSCPARTITIONERSIMPLE: allocates the implementation struct and installs the simple method table */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "PetscPartitionerDestroy_Gather"
953 PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
954 {
955   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
956   PetscErrorCode          ierr;
957 
958   PetscFunctionBegin;
959   ierr = PetscFree(p);CHKERRQ(ierr);
960   PetscFunctionReturn(0);
961 }
962 
963 #undef __FUNCT__
964 #define __FUNCT__ "PetscPartitionerView_Gather_Ascii"
965 PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
966 {
967   PetscViewerFormat format;
968   PetscErrorCode    ierr;
969 
970   PetscFunctionBegin;
971   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
972   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
973   PetscFunctionReturn(0);
974 }
975 
976 #undef __FUNCT__
977 #define __FUNCT__ "PetscPartitionerView_Gather"
978 PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
979 {
980   PetscBool      iascii;
981   PetscErrorCode ierr;
982 
983   PetscFunctionBegin;
984   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
985   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
986   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
987   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
988   PetscFunctionReturn(0);
989 }
990 
991 #undef __FUNCT__
992 #define __FUNCT__ "PetscPartitionerPartition_Gather"
993 PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
994 {
995   PetscInt       np;
996   PetscErrorCode ierr;
997 
998   PetscFunctionBegin;
999   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1000   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1001   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1002   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1003   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1004   PetscFunctionReturn(0);
1005 }
1006 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Gather"
/* Install the function table for the gather partitioner */
PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Gather;
  part->ops->destroy   = PetscPartitionerDestroy_Gather;
  part->ops->partition = PetscPartitionerPartition_Gather;
  PetscFunctionReturn(0);
}
1017 
1018 /*MC
1019   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1020 
1021   Level: intermediate
1022 
1023 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1024 M*/
1025 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Gather"
/* Constructor for PETSCPARTITIONERGATHER: allocates the implementation struct and installs the gather method table */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
{
  PetscPartitioner_Gather *p;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1041 
1042 
1043 #undef __FUNCT__
1044 #define __FUNCT__ "PetscPartitionerDestroy_Chaco"
1045 PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1046 {
1047   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1048   PetscErrorCode          ierr;
1049 
1050   PetscFunctionBegin;
1051   ierr = PetscFree(p);CHKERRQ(ierr);
1052   PetscFunctionReturn(0);
1053 }
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "PetscPartitionerView_Chaco_Ascii"
1057 PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1058 {
1059   PetscViewerFormat format;
1060   PetscErrorCode    ierr;
1061 
1062   PetscFunctionBegin;
1063   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1064   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 #undef __FUNCT__
1069 #define __FUNCT__ "PetscPartitionerView_Chaco"
1070 PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1071 {
1072   PetscBool      iascii;
1073   PetscErrorCode ierr;
1074 
1075   PetscFunctionBegin;
1076   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1077   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1078   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1079   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1080   PetscFunctionReturn(0);
1081 }
1082 
1083 #if defined(PETSC_HAVE_CHACO)
1084 #if defined(PETSC_HAVE_UNISTD_H)
1085 #include <unistd.h>
1086 #endif
1087 /* Chaco does not have an include file */
1088 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1089                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1090                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1091                        int mesh_dims[3], double *goal, int global_method, int local_method,
1092                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1093 
1094 extern int FREE_GRAPH;
1095 #endif
1096 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerPartition_Chaco"
/*
  PetscPartitionerPartition_Chaco - Partition the CSR graph with the Chaco library.

  The adjacency array is temporarily shifted to the 1-based numbering Chaco expects
  (and shifted back before returning), Chaco's stdout is captured through a pipe so
  its output can be reported in an error message, and the resulting assignment is
  converted into a PetscSection (partition sizes) plus an IS (points grouped by
  target partition).
*/
PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
  short int     *assignment;              /* Output partition */
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  if (!numVertices) {
    /* Empty local graph: produce an empty partition without calling Chaco */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Shift adjacency to 1-based numbering for Chaco; undone at the end of this function */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    char msgLog[10000];
    int  count;

    /* Restore stdout and collect whatever Chaco printed (truncated at 9999 bytes) */
    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  /* Gather points partition by partition so the IS lists points grouped by target partition */
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1202 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_Chaco"
/* Install the function table for the Chaco partitioner */
PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_Chaco;
  part->ops->destroy   = PetscPartitionerDestroy_Chaco;
  part->ops->partition = PetscPartitionerPartition_Chaco;
  PetscFunctionReturn(0);
}
1213 
1214 /*MC
1215   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1216 
1217   Level: intermediate
1218 
1219 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1220 M*/
1221 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_Chaco"
/* Constructor for PETSCPARTITIONERCHACO: allocates the implementation struct, installs the Chaco
   method table, and registers the Chaco citation */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
{
  PetscPartitioner_Chaco *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1238 
1239 #undef __FUNCT__
1240 #define __FUNCT__ "PetscPartitionerDestroy_ParMetis"
1241 PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1242 {
1243   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1244   PetscErrorCode             ierr;
1245 
1246   PetscFunctionBegin;
1247   ierr = PetscFree(p);CHKERRQ(ierr);
1248   PetscFunctionReturn(0);
1249 }
1250 
1251 #undef __FUNCT__
1252 #define __FUNCT__ "PetscPartitionerView_ParMetis_Ascii"
1253 PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1254 {
1255   PetscViewerFormat format;
1256   PetscErrorCode    ierr;
1257 
1258   PetscFunctionBegin;
1259   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1260   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1261   PetscFunctionReturn(0);
1262 }
1263 
1264 #undef __FUNCT__
1265 #define __FUNCT__ "PetscPartitionerView_ParMetis"
1266 PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1267 {
1268   PetscBool      iascii;
1269   PetscErrorCode ierr;
1270 
1271   PetscFunctionBegin;
1272   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1273   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1274   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1275   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1276   PetscFunctionReturn(0);
1277 }
1278 
1279 #if defined(PETSC_HAVE_PARMETIS)
1280 #include <parmetis.h>
1281 #endif
1282 
1283 #undef __FUNCT__
1284 #define __FUNCT__ "PetscPartitionerPartition_ParMetis"
1285 PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1286 {
1287 #if defined(PETSC_HAVE_PARMETIS)
1288   MPI_Comm       comm;
1289   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1290   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1291   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1292   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1293   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1294   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1295   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1296   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1297   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1298   PetscReal     *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1299   PetscReal     *ubvec;                    /* The balance intolerance for vertex weights */
1300   PetscInt       options[5];               /* Options */
1301   /* Outputs */
1302   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1303   PetscInt      *assignment, *points;
1304   PetscMPIInt    rank, p, v, i;
1305   PetscErrorCode ierr;
1306 
1307   PetscFunctionBegin;
1308   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1309   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1310   options[0] = 0; /* Use all defaults */
1311   /* Calculate vertex distribution */
1312   ierr = PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
1313   vtxdist[0] = 0;
1314   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1315   for (p = 2; p <= nparts; ++p) {
1316     vtxdist[p] += vtxdist[p-1];
1317   }
1318   /* Calculate weights */
1319   for (p = 0; p < nparts; ++p) {
1320     tpwgts[p] = 1.0/nparts;
1321   }
1322   ubvec[0] = 1.05;
1323 
1324   if (nparts == 1) {
1325     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1326   } else {
1327     if (vtxdist[1] == vtxdist[nparts]) {
1328       if (!rank) {
1329         PetscStackPush("METIS_PartGraphKway");
1330         ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1331         PetscStackPop;
1332         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1333       }
1334     } else {
1335       PetscStackPush("ParMETIS_V3_PartKway");
1336       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1337       PetscStackPop;
1338       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1339     }
1340   }
1341   /* Convert to PetscSection+IS */
1342   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1343   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1344   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1345   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1346   for (p = 0, i = 0; p < nparts; ++p) {
1347     for (v = 0; v < nvtxs; ++v) {
1348       if (assignment[v] == p) points[i++] = v;
1349     }
1350   }
1351   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1352   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1353   ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
1354   PetscFunctionReturn(0);
1355 #else
1356   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1357 #endif
1358 }
1359 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerInitialize_ParMetis"
/* Install the function table for the ParMetis partitioner */
PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->ops->view      = PetscPartitionerView_ParMetis;
  part->ops->destroy   = PetscPartitionerDestroy_ParMetis;
  part->ops->partition = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}
1370 
1371 /*MC
1372   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1373 
1374   Level: intermediate
1375 
1376 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1377 M*/
1378 
#undef __FUNCT__
#define __FUNCT__ "PetscPartitionerCreate_ParMetis"
/* Constructor for PETSCPARTITIONERPARMETIS: allocates the implementation struct, installs the
   ParMetis method table, and registers the ParMetis citation */
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
{
  PetscPartitioner_ParMetis *p;
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
  part->data = p;

  ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
  ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1395 
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetPartitioner"
/*@
  DMPlexGetPartitioner - Get the mesh partitioner

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. part - The PetscPartitioner

  Level: developer

  Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

.seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
@*/
PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(part, 2);
  *part = mesh->partitioner; /* borrowed reference: the reference count is not incremented */
  PetscFunctionReturn(0);
}
1425 
#undef __FUNCT__
#define __FUNCT__ "DMPlexSetPartitioner"
/*@
  DMPlexSetPartitioner - Set the mesh partitioner

  logically collective on dm and part

  Input Parameters:
+ dm - The DM
- part - The partitioner

  Level: developer

  Note: Any existing PetscPartitioner will be destroyed.

.seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
@*/
PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
  /* Reference the new partitioner before destroying the old one, so that passing
     the currently-set partitioner is safe */
  ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
  mesh->partitioner = part;
  PetscFunctionReturn(0);
}
1456 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure_Tree"
/* Recursively add tree relatives of 'point' to 'label' with value 'rank':
   when 'up' is set, the transitive closure of the tree parent (and its ancestors, by
   recursion); when 'down' is set, all tree children (and their descendants, by recursion). */
static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* a point is its own parent when it has no tree parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue upward only: descending here would revisit the closure just added */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = DMLabelSetValue(label,cpoint,rank);CHKERRQ(ierr);
        /* Continue downward only */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1499 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelClosure"
/*@
  DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* The closure workspace is reused across points and restored once after the loops */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* closure stores (point, orientation) pairs */
        ierr = DMLabelSetValue(label, closure[c], rank);CHKERRQ(ierr);
        /* Also label tree ancestors and children of each closure point */
        ierr = DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1546 
#undef __FUNCT__
#define __FUNCT__ "DMPlexPartitionLabelAdjacency"
/*@
  DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
@*/
PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, a, adjSize;
  PetscInt       *adj = NULL;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* PETSC_DETERMINE lets DMPlexGetAdjacency size (and reuse) the adj workspace */
      adjSize = PETSC_DETERMINE;
      ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1591 
1592 #undef __FUNCT__
1593 #define __FUNCT__ "DMPlexPartitionLabelPropagate"
1594 /*@
1595   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1596 
  Input Parameters:
+ dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1600 
1601   Level: developer
1602 
1603   Note: This is required when generating multi-level overlaps to capture
1604   overlap points from non-neighbouring partitions.
1605 
1606 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1607 @*/
1608 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1609 {
1610   MPI_Comm        comm;
1611   PetscMPIInt     rank;
1612   PetscSF         sfPoint;
1613   DMLabel         lblRoots, lblLeaves;
1614   IS              rankIS, pointIS;
1615   const PetscInt *ranks;
1616   PetscInt        numRanks, r;
1617   PetscErrorCode  ierr;
1618 
1619   PetscFunctionBegin;
1620   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1621   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1622   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1623   /* Pull point contributions from remote leaves into local roots */
1624   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
1625   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
1626   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1627   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1628   for (r = 0; r < numRanks; ++r) {
1629     const PetscInt remoteRank = ranks[r];
1630     if (remoteRank == rank) continue;
1631     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
1632     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1633     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1634   }
1635   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1636   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1637   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
1638   /* Push point contributions from roots into remote leaves */
1639   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
1640   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
1641   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1642   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1643   for (r = 0; r < numRanks; ++r) {
1644     const PetscInt remoteRank = ranks[r];
1645     if (remoteRank == rank) continue;
1646     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
1647     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
1648     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1649   }
1650   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
1651   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
1652   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
1653   PetscFunctionReturn(0);
1654 }
1655 
1656 #undef __FUNCT__
1657 #define __FUNCT__ "DMPlexPartitionLabelInvert"
1658 /*@
1659   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1660 
  Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
1668 
1669   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1670   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1671 
1672   Level: developer
1673 
1674 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1675 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           p, n, numNeighbors, size, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* Build a section over target ranks: one chart entry per process, with as
     many dofs as rootLabel points destined for that process. */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Pack each labelled point as a PetscSFNode naming its true owner: if the
     point is a leaf of the point SF, record the remote (owner, index) pair;
     otherwise it is locally owned and we record (point, rank). */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* local is NULL for a contiguous leaf layout; PetscFindInt assumes local is sorted */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Ship the (point, owner) pairs to their target processes over processSF;
     MPIU_2INT matches the two-PetscInt layout of PetscSFNode. */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  /* Record each received point under its owning rank in leafLabel. */
  ierr = PetscSectionGetStorageSize(leafSection, &size);CHKERRQ(ierr);
  for (p = 0; p < size; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1745 
1746 #undef __FUNCT__
1747 #define __FUNCT__ "DMPlexPartitionLabelCreateSF"
1748 /*@
1749   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1750 
  Input Parameters:
+ dm    - The DM
- label - DMLabel assigning ranks to remote roots

  Output Parameter:
. sf    - The star forest communication context encapsulating the defined mapping
1757 
1758   Note: The incoming label is a receiver mapping of remote points to their parent rank.
1759 
1760   Level: developer
1761 
1762 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1763 @*/
1764 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1765 {
1766   PetscMPIInt     rank, numProcs;
1767   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1768   PetscSFNode    *remotePoints;
1769   IS              remoteRootIS;
1770   const PetscInt *remoteRoots;
1771   PetscErrorCode ierr;
1772 
1773   PetscFunctionBegin;
1774   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1775   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
1776 
1777   for (numRemote = 0, n = 0; n < numProcs; ++n) {
1778     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1779     numRemote += numPoints;
1780   }
1781   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
1782   /* Put owned points first */
1783   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
1784   if (numPoints > 0) {
1785     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
1786     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1787     for (p = 0; p < numPoints; p++) {
1788       remotePoints[idx].index = remoteRoots[p];
1789       remotePoints[idx].rank = rank;
1790       idx++;
1791     }
1792     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1793     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1794   }
1795   /* Now add remote points */
1796   for (n = 0; n < numProcs; ++n) {
1797     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
1798     if (numPoints <= 0 || n == rank) continue;
1799     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
1800     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1801     for (p = 0; p < numPoints; p++) {
1802       remotePoints[idx].index = remoteRoots[p];
1803       remotePoints[idx].rank = n;
1804       idx++;
1805     }
1806     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
1807     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
1808   }
1809   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
1810   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1811   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1812   PetscFunctionReturn(0);
1813 }
1814