static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

/* Sample usage:

Load a file in serial and distribute it on 24 processes:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24

Load a file in serial and distribute it on 24 processes using a custom partitioner:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it randomly, refine it in parallel, redistribute it on 24 processes using two different partitioners, and view the result in VTK format:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24

*/
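
/* Command line options registered by this example (see ProcessOptions() below):

     -overlap <n>        - Number of cell layers to overlap when distributing
     -test_partition     - Use the fixed partitions hard-coded in CreateMesh()
     -test_redundant     - Give every process a complete copy of the mesh
     -load_balance       - Redistribute a second time with the "lb_"-prefixed partitioner
     -partition_balance  - Balance the ownership of points shared between processes
*/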

enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_REDISTRIBUTE};

typedef struct {
  /* Domain and mesh definition */
  PetscInt  overlap;                      /* The cell overlap to use during partitioning */
  PetscBool testPartition;                /* Use a fixed partitioning for testing */
  PetscBool testRedundant;                /* Use a redundant partitioning for testing */
  PetscBool loadBalance;                  /* Load balance via a second distribute step */
  PetscBool partitionBalance;             /* Balance shared point partition */
  PetscLogStage stages[4];
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBegin;
  options->overlap          = 0;
  options->testPartition    = PETSC_FALSE;
  options->testRedundant    = PETSC_FALSE;
  options->loadBalance      = PETSC_FALSE;
  options->partitionBalance = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL, 0));
  PetscCall(PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL));
  PetscCall(PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL));
  PetscCall(PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL));
  PetscCall(PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL));
  PetscOptionsEnd();

  PetscCall(PetscLogStageRegister("MeshLoad",         &options->stages[STAGE_LOAD]));
  PetscCall(PetscLogStageRegister("MeshDistribute",   &options->stages[STAGE_DISTRIBUTE]));
  PetscCall(PetscLogStageRegister("MeshRefine",       &options->stages[STAGE_REFINE]));
  PetscCall(PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]));
  PetscFunctionReturn(0);
}

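/* CreateMesh builds the base DM from the command-line options, distributes it (or
   makes redundant copies with -test_redundant), optionally redistributes it with the
   "lb_"-prefixed partitioner, and views the mesh at each stage. */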
PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DM             pdm             = NULL;
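  /* Fixed partitions used with -test_partition: each Sizes[] entry is the number of
     cells assigned to a rank, and the matching Points[] array lists the cell numbers,
     concatenated rank by rank, as consumed by PetscPartitionerShellSetPartition() */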
  PetscInt       triSizes_n2[2]  = {4, 4};
  PetscInt       triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
  PetscInt       triSizes_n3[3]  = {3, 2, 3};
  PetscInt       triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
  PetscInt       triSizes_n4[4]  = {2, 2, 2, 2};
  PetscInt       triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
  PetscInt       triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt       triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt       quadSizes[2]    = {2, 2};
  PetscInt       quadPoints[4]   = {2, 3, 0, 1};
  PetscInt       overlap         = user->overlap >= 0 ? user->overlap : 0;
  PetscInt       dim;
  PetscBool      simplex;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
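  /* Keep DMSetFromOptions() from distributing the mesh automatically; this example
     distributes explicitly below so it can control the overlap and the partitioner */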
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
  PetscCall(PetscLogStagePop());
  PetscCall(DMGetDimension(*dm, &dim));
  PetscCall(DMPlexIsSimplex(*dm, &simplex));
  PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
  if (!user->testRedundant) {
    PetscPartitioner part;

    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
    if (user->testPartition) {
      const PetscInt *sizes = NULL;
      const PetscInt *points = NULL;

      if (rank == 0) {
        if (dim == 2 && simplex && size == 2) {
          sizes = triSizes_n2; points = triPoints_n2;
        } else if (dim == 2 && simplex && size == 3) {
          sizes = triSizes_n3; points = triPoints_n3;
        } else if (dim == 2 && simplex && size == 4) {
          sizes = triSizes_n4; points = triPoints_n4;
        } else if (dim == 2 && simplex && size == 8) {
          sizes = triSizes_n8; points = triPoints_n8;
        } else if (dim == 2 && !simplex && size == 2) {
          sizes = quadSizes; points = quadPoints;
        }
      }
      PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
      PetscCall(PetscPartitionerShellSetPartition(part, size, sizes, points));
    }
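    /* Distribute with the requested number of overlap cell layers; pdm comes back NULL
       when no distribution takes place (e.g. on a single process) */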
    PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
  } else {
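    /* With -test_redundant, every process receives a complete copy of the mesh; the
       returned SF is also used to exercise DMPlexMigrate() into a fresh DM, which can
       be viewed with -redundant_migrated_dm_view */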
    PetscSF sf;

    PetscCall(DMPlexGetRedundantDM(*dm, &sf, &pdm));
    if (sf) {
      DM test;

      PetscCall(DMPlexCreate(comm, &test));
      PetscCall(PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh"));
      PetscCall(DMPlexMigrate(*dm, sf, test));
      PetscCall(DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view"));
      PetscCall(DMDestroy(&test));
    }
    PetscCall(PetscSFDestroy(&sf));
  }
  if (pdm) {
    PetscCall(DMDestroy(dm));
    *dm  = pdm;
  }
  PetscCall(PetscLogStagePop());
  PetscCall(DMSetFromOptions(*dm));
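  /* With -load_balance, redistribute the (possibly refined) mesh a second time; the
     "lb_" prefix set below lets a different partitioner be chosen with
     -lb_petscpartitioner_type, as in the sample usage above */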
  if (user->loadBalance) {
    PetscPartitioner part;

    PetscCall(DMViewFromOptions(*dm, NULL, "-prelb_dm_view"));
    PetscCall(DMPlexSetOptionsPrefix(*dm, "lb_"));
    PetscCall(PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]));
    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject) part, "lb_"));
    PetscCall(PetscPartitionerSetFromOptions(part));
    if (user->testPartition) {
      PetscInt         reSizes_n2[2]  = {2, 2};
      PetscInt         rePoints_n2[4] = {2, 3, 0, 1};
      if (rank) {rePoints_n2[0] = 1; rePoints_n2[1] = 2; rePoints_n2[2] = 0; rePoints_n2[3] = 3;}

      PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
      PetscCall(PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2));
    }
    PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
    PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm  = pdm;
    }
    PetscCall(PetscLogStagePop());
  }
  PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscCall(PetscLogStagePop());
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM             dm;
  AppCtx         user; /* user-defined work context */

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  # Parallel load balancing, tests 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 12-13
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
TEST*/