xref: /petsc/src/dm/impls/plex/tests/ex12.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
/* Help text printed for -help; also passed to PetscInitialize() below */
static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscsf.h>
5 
6 /* Sample usage:
7 
8 Load a file in serial and distribute it on 24 processes:
9 
10   make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24
11 
12 Load a file in serial and distribute it on 24 processes using a custom partitioner:
13 
14   make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24
15 
16 Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:
17 
18   make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24
19 
20 Load a file in serial, distribute it randomly, refine it in parallel, and then redistribute it on 24 processes using two different partitioners, and view to VTK:
21 
22   make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24
23 
24 */
25 
/* Indices into AppCtx.stages[]; values must stay 0..3 and match the
   registration order in ProcessOptions() */
enum {
  STAGE_LOAD,         /* serial mesh creation/load */
  STAGE_DISTRIBUTE,   /* first parallel distribution */
  STAGE_REFINE,       /* pushed around the final -dm_view in CreateMesh() */
  STAGE_REDISTRIBUTE  /* optional load-balancing redistribution */
};
32 
/* Application context: run-time options plus logging stages */
typedef struct {
  /* Domain and mesh definition */
  PetscInt      overlap;          /* The cell overlap to use during partitioning */
  PetscBool     testPartition;    /* Use a fixed partitioning for testing */
  PetscBool     testRedundant;    /* Use a redundant partitioning for testing */
  PetscBool     loadBalance;      /* Load balance via a second distribute step */
  PetscBool     partitionBalance; /* Balance shared point partition */
  PetscLogStage stages[4];        /* Log stages, indexed by the STAGE_* enum */
} AppCtx;
42 
43 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) {
44   PetscFunctionBegin;
45   options->overlap          = 0;
46   options->testPartition    = PETSC_FALSE;
47   options->testRedundant    = PETSC_FALSE;
48   options->loadBalance      = PETSC_FALSE;
49   options->partitionBalance = PETSC_FALSE;
50 
51   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
52   PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL, 0));
53   PetscCall(PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL));
54   PetscCall(PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL));
55   PetscCall(PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL));
56   PetscCall(PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL));
57   PetscOptionsEnd();
58 
59   PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
60   PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
61   PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
62   PetscCall(PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]));
63   PetscFunctionReturn(0);
64 }
65 
66 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) {
67   DM          pdm             = NULL;
68   PetscInt    triSizes_n2[2]  = {4, 4};
69   PetscInt    triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
70   PetscInt    triSizes_n3[3]  = {3, 2, 3};
71   PetscInt    triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
72   PetscInt    triSizes_n4[4]  = {2, 2, 2, 2};
73   PetscInt    triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
74   PetscInt    triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
75   PetscInt    triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
76   PetscInt    quadSizes[2]    = {2, 2};
77   PetscInt    quadPoints[4]   = {2, 3, 0, 1};
78   PetscInt    overlap         = user->overlap >= 0 ? user->overlap : 0;
79   PetscInt    dim;
80   PetscBool   simplex;
81   PetscMPIInt rank, size;
82 
83   PetscFunctionBegin;
84   PetscCallMPI(MPI_Comm_rank(comm, &rank));
85   PetscCallMPI(MPI_Comm_size(comm, &size));
86   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
87   PetscCall(DMCreate(comm, dm));
88   PetscCall(DMSetType(*dm, DMPLEX));
89   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
90   PetscCall(DMSetFromOptions(*dm));
91   PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
92   PetscCall(PetscLogStagePop());
93   PetscCall(DMGetDimension(*dm, &dim));
94   PetscCall(DMPlexIsSimplex(*dm, &simplex));
95   PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
96   if (!user->testRedundant) {
97     PetscPartitioner part;
98 
99     PetscCall(DMPlexGetPartitioner(*dm, &part));
100     PetscCall(PetscPartitionerSetFromOptions(part));
101     PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
102     if (user->testPartition) {
103       const PetscInt *sizes  = NULL;
104       const PetscInt *points = NULL;
105 
106       if (rank == 0) {
107         if (dim == 2 && simplex && size == 2) {
108           sizes  = triSizes_n2;
109           points = triPoints_n2;
110         } else if (dim == 2 && simplex && size == 3) {
111           sizes  = triSizes_n3;
112           points = triPoints_n3;
113         } else if (dim == 2 && simplex && size == 4) {
114           sizes  = triSizes_n4;
115           points = triPoints_n4;
116         } else if (dim == 2 && simplex && size == 8) {
117           sizes  = triSizes_n8;
118           points = triPoints_n8;
119         } else if (dim == 2 && !simplex && size == 2) {
120           sizes  = quadSizes;
121           points = quadPoints;
122         }
123       }
124       PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
125       PetscCall(PetscPartitionerShellSetPartition(part, size, sizes, points));
126     }
127     PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
128   } else {
129     PetscSF sf;
130 
131     PetscCall(DMPlexGetRedundantDM(*dm, &sf, &pdm));
132     if (sf) {
133       DM test;
134 
135       PetscCall(DMPlexCreate(comm, &test));
136       PetscCall(PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh"));
137       PetscCall(DMPlexMigrate(*dm, sf, test));
138       PetscCall(DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view"));
139       PetscCall(DMDestroy(&test));
140     }
141     PetscCall(PetscSFDestroy(&sf));
142   }
143   if (pdm) {
144     PetscCall(DMDestroy(dm));
145     *dm = pdm;
146   }
147   PetscCall(PetscLogStagePop());
148   PetscCall(DMSetFromOptions(*dm));
149   if (user->loadBalance) {
150     PetscPartitioner part;
151 
152     PetscCall(DMViewFromOptions(*dm, NULL, "-prelb_dm_view"));
153     PetscCall(DMPlexSetOptionsPrefix(*dm, "lb_"));
154     PetscCall(PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]));
155     PetscCall(DMPlexGetPartitioner(*dm, &part));
156     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, "lb_"));
157     PetscCall(PetscPartitionerSetFromOptions(part));
158     if (user->testPartition) {
159       PetscInt reSizes_n2[2]  = {2, 2};
160       PetscInt rePoints_n2[4] = {2, 3, 0, 1};
161       if (rank) {
162         rePoints_n2[0] = 1;
163         rePoints_n2[1] = 2, rePoints_n2[2] = 0, rePoints_n2[3] = 3;
164       }
165 
166       PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
167       PetscCall(PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2));
168     }
169     PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
170     PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
171     if (pdm) {
172       PetscCall(DMDestroy(dm));
173       *dm = pdm;
174     }
175     PetscCall(PetscLogStagePop());
176   }
177   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
178   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
179   PetscCall(PetscLogStagePop());
180   PetscFunctionReturn(0);
181 }
182 
183 int main(int argc, char **argv) {
184   DM     dm;
185   AppCtx user; /* user-defined work context */
186 
187   PetscFunctionBeginUser;
188   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
189   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
190   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
191   PetscCall(DMDestroy(&dm));
192   PetscCall(PetscFinalize());
193   return 0;
194 }
195 
196 /*TEST
197   # Parallel, no overlap tests 0-2
198   test:
199     suffix: 0
200     requires: triangle
201     args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex
202   test:
203     suffix: 1
204     requires: triangle
205     nsize: 3
206     args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
207   test:
208     suffix: 2
209     requires: triangle
210     nsize: 8
211     args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
212   # Parallel, level-1 overlap tests 3-4
213   test:
214     suffix: 3
215     requires: triangle
216     nsize: 3
217     args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
218   test:
219     suffix: 4
220     requires: triangle
221     nsize: 8
222     args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
223   # Parallel, level-2 overlap test 5
224   test:
225     suffix: 5
226     requires: triangle
227     nsize: 8
228     args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
229   # Parallel load balancing, test 6-7
230   test:
231     suffix: 6
232     requires: triangle
233     nsize: 2
234     args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
235   test:
236     suffix: 7
237     requires: triangle
238     nsize: 2
239     args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
240   # Parallel redundant copying, test 8
241   test:
242     suffix: 8
243     requires: triangle
244     nsize: 2
245     args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
246   test:
247     suffix: lb_0
248     requires: parmetis
249     nsize: 4
250     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance
251 
252   # Same tests as above, but with balancing of the shared point partition
253   test:
254     suffix: 9
255     requires: triangle
256     args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance
257   test:
258     suffix: 10
259     requires: triangle
260     nsize: 3
261     args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
262   test:
263     suffix: 11
264     requires: triangle
265     nsize: 8
266     args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 12-13
268   test:
269     suffix: 12
270     requires: triangle
271     nsize: 3
272     args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
273   test:
274     suffix: 13
275     requires: triangle
276     nsize: 8
277     args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
279   test:
280     suffix: 14
281     requires: triangle
282     nsize: 8
283     args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
285   test:
286     suffix: 15
287     requires: triangle
288     nsize: 2
289     args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
290   test:
291     suffix: 16
292     requires: triangle
293     nsize: 2
294     args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
296   test:
297     suffix: 17
298     requires: triangle
299     nsize: 2
300     args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
301   test:
302     suffix: lb_1
303     requires: parmetis
304     nsize: 4
305     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
306 TEST*/
307