static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

/* Sample usage:

Load a file in serial and distribute it on 24 processes:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24

Load a file in serial and distribute it on 24 processes using a custom partitioner:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it randomly, refine it in parallel, redistribute it on 24 processes with a second partitioner, and view the result in VTK format:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24

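The runs above go through the PETSc test harness; as a rough sketch, the first one
can also be launched directly (assuming an MPI launcher such as mpiexec and a
compiled ./ex12 binary):

  mpiexec -n 24 ./ex12 -filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view
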
*/

enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_REDISTRIBUTE};

typedef struct {
  /* Domain and mesh definition */
  PetscInt  overlap;                      /* The cell overlap to use during partitioning */
  PetscBool testPartition;                /* Use a fixed partitioning for testing */
  PetscBool testRedundant;                /* Use a redundant partitioning for testing */
  PetscBool loadBalance;                  /* Load balance via a second distribute step */
  PetscBool partitionBalance;             /* Balance shared point partition */
  PetscLogStage stages[4];
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  options->overlap          = 0;
  options->testPartition    = PETSC_FALSE;
  options->testRedundant    = PETSC_FALSE;
  options->loadBalance      = PETSC_FALSE;
  options->partitionBalance = PETSC_FALSE;

  ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL, 0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  ierr = PetscLogStageRegister("MeshLoad",         &options->stages[STAGE_LOAD]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshDistribute",   &options->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshRefine",       &options->stages[STAGE_REFINE]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DM             pdm             = NULL;
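  /* Fixed partitions for -test_partition: sizes[p] gives the number of cells
     assigned to rank p, and points lists the cell numbers grouped by rank */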
  PetscInt       triSizes_n2[2]  = {4, 4};
  PetscInt       triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
  PetscInt       triSizes_n3[3]  = {3, 2, 3};
  PetscInt       triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
  PetscInt       triSizes_n4[4]  = {2, 2, 2, 2};
  PetscInt       triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
  PetscInt       triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt       triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt       quadSizes[2]    = {2, 2};
  PetscInt       quadPoints[4]   = {2, 3, 0, 1};
  PetscInt       overlap         = user->overlap >= 0 ? user->overlap : 0;
  PetscInt       dim;
  PetscBool      simplex;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = PetscLogStagePush(user->stages[STAGE_LOAD]);CHKERRQ(ierr);
  ierr = DMCreate(comm, dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm, DMPLEX);CHKERRQ(ierr);
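  /* Keep DMSetFromOptions() from distributing the mesh automatically; the
     distribution is performed explicitly below so it can be timed and customized */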
  ierr = DMPlexDistributeSetDefault(*dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
  ierr = DMViewFromOptions(*dm, NULL, "-orig_dm_view");CHKERRQ(ierr);
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  ierr = DMGetDimension(*dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexIsSimplex(*dm, &simplex);CHKERRQ(ierr);
  ierr = PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr);
  if (!user->testRedundant) {
    PetscPartitioner part;

    ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
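    /* Optionally balance which rank takes ownership of points shared between
       processes during distribution (-partition_balance) */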
    ierr = DMPlexSetPartitionBalance(*dm, user->partitionBalance);CHKERRQ(ierr);
    if (user->testPartition) {
      const PetscInt *sizes = NULL;
      const PetscInt *points = NULL;

      if (rank == 0) {
        if (dim == 2 && simplex && size == 2) {
          sizes = triSizes_n2; points = triPoints_n2;
        } else if (dim == 2 && simplex && size == 3) {
          sizes = triSizes_n3; points = triPoints_n3;
        } else if (dim == 2 && simplex && size == 4) {
          sizes = triSizes_n4; points = triPoints_n4;
        } else if (dim == 2 && simplex && size == 8) {
          sizes = triSizes_n8; points = triPoints_n8;
        } else if (dim == 2 && !simplex && size == 2) {
          sizes = quadSizes; points = quadPoints;
        }
      }
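      /* The mesh is still serial on rank 0, so only rank 0 supplies a
         partition description; the remaining ranks pass NULL */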
      ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);CHKERRQ(ierr);
      ierr = PetscPartitionerShellSetPartition(part, size, sizes, points);CHKERRQ(ierr);
    }
    ierr = DMPlexDistribute(*dm, overlap, NULL, &pdm);CHKERRQ(ierr);
  } else {
    PetscSF sf;

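    /* Copy the entire mesh onto every rank; sf encodes the migration from the
       original layout and is reused below to test DMPlexMigrate() */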
    ierr = DMPlexGetRedundantDM(*dm, &sf, &pdm);CHKERRQ(ierr);
    if (sf) {
      DM test;

      ierr = DMPlexCreate(comm, &test);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh");CHKERRQ(ierr);
      ierr = DMPlexMigrate(*dm, sf, test);CHKERRQ(ierr);
      ierr = DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view");CHKERRQ(ierr);
      ierr = DMDestroy(&test);CHKERRQ(ierr);
    }
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  }
  if (pdm) {
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = pdm;
  }
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
  if (user->loadBalance) {
    PetscPartitioner part;

    ierr = DMViewFromOptions(*dm, NULL, "-prelb_dm_view");CHKERRQ(ierr);
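    /* Give the DM and its partitioner the "lb_" options prefix so the second
       distribution is steered separately, e.g. by -lb_petscpartitioner_type */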
    ierr = DMPlexSetOptionsPrefix(*dm, "lb_");CHKERRQ(ierr);
    ierr = PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]);CHKERRQ(ierr);
    ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject) part, "lb_");CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
    if (user->testPartition) {
      PetscInt reSizes_n2[2]  = {2, 2};
      PetscInt rePoints_n2[4] = {2, 3, 0, 1};

      if (rank) {rePoints_n2[0] = 1; rePoints_n2[1] = 2; rePoints_n2[2] = 0; rePoints_n2[3] = 3;}
      ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);CHKERRQ(ierr);
      ierr = PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2);CHKERRQ(ierr);
    }
    ierr = DMPlexSetPartitionBalance(*dm, user->partitionBalance);CHKERRQ(ierr);
    ierr = DMPlexDistribute(*dm, overlap, NULL, &pdm);CHKERRQ(ierr);
    if (pdm) {
      ierr = DMDestroy(dm);CHKERRQ(ierr);
      *dm  = pdm;
    }
    ierr = PetscLogStagePop();CHKERRQ(ierr);
  }
  ierr = PetscLogStagePush(user->stages[STAGE_REFINE]);CHKERRQ(ierr);
  ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM             dm;
  AppCtx         user; /* user-defined work context */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
  ierr = CreateMesh(PETSC_COMM_WORLD, &user, &dm);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST
  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  # Parallel load balancing, tests 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 12-13
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
TEST*/