xref: /petsc/src/dm/impls/plex/tests/ex24.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 static char help[]     = "Test that MatPartitioning and PetscPartitioner interfaces are equivalent when using PETSCPARTITIONERMATPARTITIONING\n\n";
2 static char FILENAME[] = "ex24.c";
3 
4 #include <petscdmplex.h>
5 #include <petscviewerhdf5.h>
6 
7 #if defined(PETSC_HAVE_PTSCOTCH)
8 EXTERN_C_BEGIN
9 #include <ptscotch.h>
10 EXTERN_C_END
11 #endif
12 
/* Command-line-driven configuration for this test. */
typedef struct {
  PetscBool compare_is; /* Compare ISs and PetscSections */
  PetscBool compare_dm; /* Compare DM */
  PetscBool tpw;        /* Use target partition weights */
  char      partitioning[64];   /* MatPartitioningType / PetscPartitionerType name for the first partition */
  char      repartitioning[64]; /* partitioner name for the second pass; empty string disables repartitioning */
} AppCtx;
20 
21 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) {
22   PetscBool repartition = PETSC_TRUE;
23 
24   PetscFunctionBegin;
25   options->compare_is = PETSC_FALSE;
26   options->compare_dm = PETSC_FALSE;
27 
28   PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
29   PetscCall(PetscOptionsBool("-compare_is", "Compare ISs and PetscSections?", FILENAME, options->compare_is, &options->compare_is, NULL));
30   PetscCall(PetscOptionsBool("-compare_dm", "Compare DMs?", FILENAME, options->compare_dm, &options->compare_dm, NULL));
31   PetscCall(PetscStrncpy(options->partitioning, MATPARTITIONINGPARMETIS, sizeof(options->partitioning)));
32   PetscCall(PetscOptionsString("-partitioning", "The mat partitioning type to test", "None", options->partitioning, options->partitioning, sizeof(options->partitioning), NULL));
33   PetscCall(PetscOptionsBool("-repartition", "Partition again after the first partition?", FILENAME, repartition, &repartition, NULL));
34   if (repartition) {
35     PetscCall(PetscStrncpy(options->repartitioning, MATPARTITIONINGPARMETIS, 64));
36     PetscCall(PetscOptionsString("-repartitioning", "The mat partitioning type to test (second partitioning)", "None", options->repartitioning, options->repartitioning, sizeof(options->repartitioning), NULL));
37   } else {
38     options->repartitioning[0] = '\0';
39   }
40   PetscCall(PetscOptionsBool("-tpweight", "Use target partition weights", FILENAME, options->tpw, &options->tpw, NULL));
41   PetscOptionsEnd();
42   PetscFunctionReturn(0);
43 }
44 
45 static PetscErrorCode ScotchResetRandomSeed() {
46   PetscFunctionBegin;
47 #if defined(PETSC_HAVE_PTSCOTCH)
48   SCOTCH_randomReset();
49 #endif
50   PetscFunctionReturn(0);
51 }
52 
/*
  Create a DMPLEX mesh on comm, configured entirely from the options
  database (e.g. -dm_plex_filename, -dm_plex_dim, -dm_plex_box_faces).
  Automatic distribution is disabled so the test can drive partitioning
  explicitly; the `user` context is currently unused but kept for
  interface symmetry with the rest of the test.
*/
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) {
  PetscFunctionBegin;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  /* keep the mesh sequential until we partition it ourselves below */
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
62 
63 int main(int argc, char **argv) {
64   MPI_Comm               comm;
65   DM                     dm1, dm2, dmdist1, dmdist2;
66   DMPlexInterpolatedFlag interp;
67   MatPartitioning        mp;
68   PetscPartitioner       part1, part2;
69   AppCtx                 user;
70   IS                     is1 = NULL, is2 = NULL;
71   IS                     is1g, is2g;
72   PetscSection           s1 = NULL, s2 = NULL, tpws = NULL;
73   PetscInt               i;
74   PetscBool              flg;
75   PetscMPIInt            size;
76 
77   PetscFunctionBeginUser;
78   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
79   comm = PETSC_COMM_WORLD;
80   PetscCallMPI(MPI_Comm_size(comm, &size));
81   PetscCall(ProcessOptions(comm, &user));
82   PetscCall(CreateMesh(comm, &user, &dm1));
83   PetscCall(CreateMesh(comm, &user, &dm2));
84 
85   if (user.tpw) {
86     PetscCall(PetscSectionCreate(comm, &tpws));
87     PetscCall(PetscSectionSetChart(tpws, 0, size));
88     for (i = 0; i < size; i++) {
89       PetscInt tdof = i % 2 ? 2 * i - 1 : i + 2;
90       PetscCall(PetscSectionSetDof(tpws, i, tdof));
91     }
92     if (size > 1) { /* test zero tpw entry */
93       PetscCall(PetscSectionSetDof(tpws, 0, 0));
94     }
95     PetscCall(PetscSectionSetUp(tpws));
96   }
97 
98   /* partition dm1 using PETSCPARTITIONERPARMETIS */
99   PetscCall(ScotchResetRandomSeed());
100   PetscCall(DMPlexGetPartitioner(dm1, &part1));
101   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1, "p1_"));
102   PetscCall(PetscPartitionerSetType(part1, user.partitioning));
103   PetscCall(PetscPartitionerSetFromOptions(part1));
104   PetscCall(PetscSectionCreate(comm, &s1));
105   PetscCall(PetscPartitionerDMPlexPartition(part1, dm1, tpws, s1, &is1));
106 
107   /* partition dm2 using PETSCPARTITIONERMATPARTITIONING with MATPARTITIONINGPARMETIS */
108   PetscCall(ScotchResetRandomSeed());
109   PetscCall(DMPlexGetPartitioner(dm2, &part2));
110   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2, "p2_"));
111   PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
112   PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
113   PetscCall(MatPartitioningSetType(mp, user.partitioning));
114   PetscCall(PetscPartitionerSetFromOptions(part2));
115   PetscCall(PetscSectionCreate(comm, &s2));
116   PetscCall(PetscPartitionerDMPlexPartition(part2, dm2, tpws, s2, &is2));
117 
118   PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
119   PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
120   PetscCall(ISViewFromOptions(is1g, NULL, "-seq_is1_view"));
121   PetscCall(ISViewFromOptions(is2g, NULL, "-seq_is2_view"));
122   /* compare the two ISs */
123   if (user.compare_is) {
124     PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
125     if (!flg) PetscCall(PetscPrintf(comm, "ISs are not equal with type %s with size %d.\n", user.partitioning, size));
126   }
127   PetscCall(ISDestroy(&is1g));
128   PetscCall(ISDestroy(&is2g));
129 
130   /* compare the two PetscSections */
131   PetscCall(PetscSectionViewFromOptions(s1, NULL, "-seq_s1_view"));
132   PetscCall(PetscSectionViewFromOptions(s2, NULL, "-seq_s2_view"));
133   if (user.compare_is) {
134     PetscCall(PetscSectionCompare(s1, s2, &flg));
135     if (!flg) PetscCall(PetscPrintf(comm, "PetscSections are not equal with %s with size %d.\n", user.partitioning, size));
136   }
137 
138   /* distribute both DMs */
139   PetscCall(ScotchResetRandomSeed());
140   PetscCall(DMPlexDistribute(dm1, 0, NULL, &dmdist1));
141   PetscCall(ScotchResetRandomSeed());
142   PetscCall(DMPlexDistribute(dm2, 0, NULL, &dmdist2));
143 
144   /* cleanup */
145   PetscCall(PetscSectionDestroy(&tpws));
146   PetscCall(PetscSectionDestroy(&s1));
147   PetscCall(PetscSectionDestroy(&s2));
148   PetscCall(ISDestroy(&is1));
149   PetscCall(ISDestroy(&is2));
150   PetscCall(DMDestroy(&dm1));
151   PetscCall(DMDestroy(&dm2));
152 
153   /* if distributed DMs are NULL (sequential case), then quit */
154   if (!dmdist1 && !dmdist2) return 0;
155 
156   PetscCall(DMViewFromOptions(dmdist1, NULL, "-dm_dist1_view"));
157   PetscCall(DMViewFromOptions(dmdist2, NULL, "-dm_dist2_view"));
158 
159   /* compare the two distributed DMs */
160   if (user.compare_dm) {
161     PetscCall(DMPlexEqual(dmdist1, dmdist2, &flg));
162     if (!flg) PetscCall(PetscPrintf(comm, "Distributed DMs are not equal %s with size %d.\n", user.partitioning, size));
163   }
164 
165   /* if repartitioning is disabled, then quit */
166   if (user.repartitioning[0] == '\0') return 0;
167 
168   if (user.tpw) {
169     PetscCall(PetscSectionCreate(comm, &tpws));
170     PetscCall(PetscSectionSetChart(tpws, 0, size));
171     for (i = 0; i < size; i++) {
172       PetscInt tdof = i % 2 ? i + 1 : size - i;
173       PetscCall(PetscSectionSetDof(tpws, i, tdof));
174     }
175     PetscCall(PetscSectionSetUp(tpws));
176   }
177 
178   /* repartition distributed DM dmdist1 */
179   PetscCall(ScotchResetRandomSeed());
180   PetscCall(DMPlexGetPartitioner(dmdist1, &part1));
181   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1, "dp1_"));
182   PetscCall(PetscPartitionerSetType(part1, user.repartitioning));
183   PetscCall(PetscPartitionerSetFromOptions(part1));
184   PetscCall(PetscSectionCreate(comm, &s1));
185   PetscCall(PetscPartitionerDMPlexPartition(part1, dmdist1, tpws, s1, &is1));
186 
187   /* repartition distributed DM dmdist2 */
188   PetscCall(ScotchResetRandomSeed());
189   PetscCall(DMPlexGetPartitioner(dmdist2, &part2));
190   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2, "dp2_"));
191   PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
192   PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
193   PetscCall(MatPartitioningSetType(mp, user.repartitioning));
194   PetscCall(PetscPartitionerSetFromOptions(part2));
195   PetscCall(PetscSectionCreate(comm, &s2));
196   PetscCall(PetscPartitionerDMPlexPartition(part2, dmdist2, tpws, s2, &is2));
197 
198   /* compare the two ISs */
199   PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
200   PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
201   PetscCall(ISViewFromOptions(is1g, NULL, "-dist_is1_view"));
202   PetscCall(ISViewFromOptions(is2g, NULL, "-dist_is2_view"));
203   if (user.compare_is) {
204     PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
205     if (!flg) PetscCall(PetscPrintf(comm, "Distributed ISs are not equal, with %s with size %d.\n", user.repartitioning, size));
206   }
207   PetscCall(ISDestroy(&is1g));
208   PetscCall(ISDestroy(&is2g));
209 
210   /* compare the two PetscSections */
211   PetscCall(PetscSectionViewFromOptions(s1, NULL, "-dist_s1_view"));
212   PetscCall(PetscSectionViewFromOptions(s2, NULL, "-dist_s2_view"));
213   if (user.compare_is) {
214     PetscCall(PetscSectionCompare(s1, s2, &flg));
215     if (!flg) PetscCall(PetscPrintf(comm, "Distributed PetscSections are not equal, with %s with size %d.\n", user.repartitioning, size));
216   }
217 
218   /* redistribute both distributed DMs */
219   PetscCall(ScotchResetRandomSeed());
220   PetscCall(DMPlexDistribute(dmdist1, 0, NULL, &dm1));
221   PetscCall(ScotchResetRandomSeed());
222   PetscCall(DMPlexDistribute(dmdist2, 0, NULL, &dm2));
223 
224   /* compare the two distributed DMs */
225   PetscCall(DMPlexIsInterpolated(dm1, &interp));
226   if (interp == DMPLEX_INTERPOLATED_NONE) {
227     PetscCall(DMPlexEqual(dm1, dm2, &flg));
228     if (!flg) PetscCall(PetscPrintf(comm, "Redistributed DMs are not equal, with %s with size %d.\n", user.repartitioning, size));
229   }
230 
231   /* cleanup */
232   PetscCall(PetscSectionDestroy(&tpws));
233   PetscCall(PetscSectionDestroy(&s1));
234   PetscCall(PetscSectionDestroy(&s2));
235   PetscCall(ISDestroy(&is1));
236   PetscCall(ISDestroy(&is2));
237   PetscCall(DMDestroy(&dm1));
238   PetscCall(DMDestroy(&dm2));
239   PetscCall(DMDestroy(&dmdist1));
240   PetscCall(DMDestroy(&dmdist2));
241   PetscCall(PetscFinalize());
242   return 0;
243 }
244 
245 /*TEST
246 
247   test:
248     # partition sequential mesh loaded from Exodus file
249     suffix: 0
250     nsize: {{1 2 3 4 8}}
251     requires: chaco parmetis ptscotch exodusii
252     args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
253     args: -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}
254   test:
255     # repartition mesh already partitioned naively by MED loader
256     suffix: 1
257     nsize: {{1 2 3 4 8}}
258     TODO: MED
259     requires: parmetis ptscotch med
260     args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med
261     args: -repartition 0 -partitioning {{parmetis ptscotch}}
262   test:
263     # partition mesh generated by ctetgen using scotch, then repartition with scotch, diff view
264     suffix: 3
265     nsize: 4
266     requires: ptscotch ctetgen
267     args: -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 -partitioning ptscotch -repartitioning ptscotch
268     args: -p1_petscpartitioner_view -p2_petscpartitioner_view -dp1_petscpartitioner_view -dp2_petscpartitioner_view -tpweight {{0 1}}
269   test:
270     # partition mesh generated by ctetgen using partitioners supported both by MatPartitioning and PetscPartitioner
271     suffix: 4
272     nsize: {{1 2 3 4 8}}
273     requires: chaco parmetis ptscotch ctetgen
274     args: -dm_plex_dim 3 -dm_plex_box_faces {{2,3,4  5,4,3  7,11,5}} -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}
275 
276 TEST*/
277