static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

/* Sample usage:

Load a file in serial and distribute it on 24 processes:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24

Load a file in serial and distribute it on 24 processes using a custom partitioner:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it randomly, refine it in parallel, then redistribute it on 24 processes using two different partitioners, and write the result to VTK:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24

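The commands above drive the example through the PETSc test harness. Assuming the ex12
executable has been built (its location depends on the build setup, e.g. $PETSC_ARCH/tests/dm/impls/plex/tests/ex12),
an equivalent run can also be launched directly, for instance:

  mpiexec -n 24 ./ex12 -filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view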
*/

enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_REDISTRIBUTE};
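/* Indices into AppCtx.stages[]: log stages for the load, distribute, refine, and redistribute
   phases, registered in ProcessOptions() and reported by -log_view */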

typedef struct {
  /* Domain and mesh definition */
  PetscInt  dim;                          /* The topological mesh dimension */
  PetscBool cellSimplex;                  /* Use simplices or hexes */
  PetscInt  cells[3];                     /* The initial domain division */
  char      filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
  PetscInt  overlap;                      /* The cell overlap to use during partitioning */
  PetscBool testPartition;                /* Use a fixed partitioning for testing */
  PetscBool testRedundant;                /* Use a redundant partitioning for testing */
  PetscBool loadBalance;                  /* Load balance via a second distribute step */
  PetscBool partitionBalance;             /* Balance shared point partition */
  PetscLogStage stages[4];
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  options->dim              = 2;
  options->cellSimplex      = PETSC_TRUE;
  options->cells[0]         = 2;
  options->cells[1]         = 2;
  options->cells[2]         = 2;
  options->filename[0]      = '\0';
  options->overlap          = 0;
  options->testPartition    = PETSC_FALSE;
  options->testRedundant    = PETSC_FALSE;
  options->loadBalance      = PETSC_FALSE;
  options->partitionBalance = PETSC_FALSE;

  ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex12.c", options->dim, &options->dim, NULL,1,3);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-cell_simplex", "Use simplices if true, otherwise hexes", "ex12.c", options->cellSimplex, &options->cellSimplex, NULL);CHKERRQ(ierr);
  n = 3;
  ierr = PetscOptionsIntArray("-cells", "The initial mesh division", "ex12.c", options->cells, &n, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsString("-filename", "The mesh file", "ex12.c", options->filename, options->filename, sizeof(options->filename), NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL,0);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  ierr = PetscLogStageRegister("MeshLoad",         &options->stages[STAGE_LOAD]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshDistribute",   &options->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshRefine",       &options->stages[STAGE_REFINE]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DM             pdm             = NULL;
  PetscInt       dim             = user->dim;
  PetscBool      cellSimplex     = user->cellSimplex;
  const char    *filename        = user->filename;
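  /* Hard-coded partitions used with -test_partition: for each supported combination of
     dimension, cell type, and number of ranks, sizes[] gives how many cells go to each rank
     and points[] lists the cell numbers, concatenated rank by rank, in the format expected
     by PetscPartitionerShellSetPartition() */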
  PetscInt       triSizes_n2[2]  = {4, 4};
  PetscInt       triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
  PetscInt       triSizes_n3[3]  = {3, 2, 3};
  PetscInt       triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
  PetscInt       triSizes_n4[4]  = {2, 2, 2, 2};
  PetscInt       triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
  PetscInt       triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt       triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt       quadSizes[2]    = {2, 2};
  PetscInt       quadPoints[4]   = {2, 3, 0, 1};
  PetscInt       overlap         = user->overlap >= 0 ? user->overlap : 0;
  size_t         len;
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = PetscStrlen(filename, &len);CHKERRQ(ierr);
  ierr = PetscLogStagePush(user->stages[STAGE_LOAD]);CHKERRQ(ierr);
  if (len) {ierr = DMPlexCreateFromFile(comm, filename, PETSC_TRUE, dm);CHKERRQ(ierr);}
  else     {ierr = DMPlexCreateBoxMesh(comm, dim, cellSimplex, user->cells, NULL, NULL, NULL, PETSC_TRUE, dm);CHKERRQ(ierr);}
  ierr = DMViewFromOptions(*dm, NULL, "-orig_dm_view");CHKERRQ(ierr);
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  ierr = PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr);
  if (!user->testRedundant) {
    PetscPartitioner part;

    ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
    ierr = DMPlexSetPartitionBalance(*dm, user->partitionBalance);CHKERRQ(ierr);
    if (user->testPartition) {
      const PetscInt *sizes = NULL;
      const PetscInt *points = NULL;

      if (!rank) {
        if (dim == 2 && cellSimplex && size == 2) {
          sizes = triSizes_n2; points = triPoints_n2;
        } else if (dim == 2 && cellSimplex && size == 3) {
          sizes = triSizes_n3; points = triPoints_n3;
        } else if (dim == 2 && cellSimplex && size == 4) {
          sizes = triSizes_n4; points = triPoints_n4;
        } else if (dim == 2 && cellSimplex && size == 8) {
          sizes = triSizes_n8; points = triPoints_n8;
        } else if (dim == 2 && !cellSimplex && size == 2) {
          sizes = quadSizes; points = quadPoints;
        }
      }
      ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);CHKERRQ(ierr);
      ierr = PetscPartitionerShellSetPartition(part, size, sizes, points);CHKERRQ(ierr);
    }
    ierr = DMPlexDistribute(*dm, overlap, NULL, &pdm);CHKERRQ(ierr);
  } else {
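    /* -test_redundant: instead of partitioning, give every process a full copy of the mesh;
       the returned SF describes the migration from the original mesh to the redundant one */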
    PetscSF sf;

    ierr = DMPlexGetRedundantDM(*dm, &sf, &pdm);CHKERRQ(ierr);
    if (sf) {
      DM test;

      ierr = DMPlexCreate(comm, &test);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh");CHKERRQ(ierr);
      ierr = DMPlexMigrate(*dm, sf, test);CHKERRQ(ierr);
      ierr = DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view");CHKERRQ(ierr);
      ierr = DMDestroy(&test);CHKERRQ(ierr);
    }
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  }
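  /* Either branch may leave pdm NULL (e.g. nothing needs to be moved on a single process);
     only replace the original mesh if a new one was actually created */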
  if (pdm) {
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = pdm;
  }
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
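  /* Optional load balancing: the already-distributed mesh is repartitioned by a second
     partitioner configured with the "lb_" options prefix (e.g. -lb_petscpartitioner_type parmetis)
     and redistributed with DMPlexDistribute() */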
  if (user->loadBalance) {
    PetscPartitioner part;

    ierr = DMViewFromOptions(*dm, NULL, "-prelb_dm_view");CHKERRQ(ierr);
    ierr = DMPlexSetOptionsPrefix(*dm, "lb_");CHKERRQ(ierr);
    ierr = PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]);CHKERRQ(ierr);
    ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject) part, "lb_");CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
    if (user->testPartition) {
      PetscInt         reSizes_n2[2]  = {2, 2};
      PetscInt         rePoints_n2[4] = {2, 3, 0, 1};
      if (rank) {rePoints_n2[0] = 1; rePoints_n2[1] = 2; rePoints_n2[2] = 0; rePoints_n2[3] = 3;}

      ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);CHKERRQ(ierr);
      ierr = PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2);CHKERRQ(ierr);
    }
    ierr = DMPlexSetPartitionBalance(*dm, user->partitionBalance);CHKERRQ(ierr);
    ierr = DMPlexDistribute(*dm, overlap, NULL, &pdm);CHKERRQ(ierr);
    if (pdm) {
      ierr = DMDestroy(dm);CHKERRQ(ierr);
      *dm  = pdm;
    }
    ierr = PetscLogStagePop();CHKERRQ(ierr);
  }
  ierr = PetscObjectSetName((PetscObject) *dm, cellSimplex ? "Simplicial Mesh" : "Tensor Product Mesh");CHKERRQ(ierr);
  ierr = PetscLogStagePush(user->stages[STAGE_REFINE]);CHKERRQ(ierr);
  ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM             dm;
  AppCtx         user; /* user-defined work context */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
  ierr = CreateMesh(PETSC_COMM_WORLD, &user, &dm);CHKERRQ(ierr);
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST
  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_view ascii:mesh.tex:ascii_latex
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -test_partition -dm_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  # Parallel load balancing, tests 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -cell_simplex 0 -cells 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_view ascii:mesh.tex:ascii_latex -partition_balance
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 12-13
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -cell_simplex 0 -cells 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
TEST*/
317