xref: /petsc/src/dm/impls/plex/tests/ex24.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)
/* Help text printed for -help; FILENAME is echoed in option descriptions below. */
static char help[] = "Test that MatPartitioning and PetscPartitioner interfaces are equivalent when using PETSCPARTITIONERMATPARTITIONING\n\n";
static char FILENAME[] = "ex24.c";
3 
4 #include <petscdmplex.h>
5 #include <petscviewerhdf5.h>
6 
7 #if defined(PETSC_HAVE_PTSCOTCH)
8 EXTERN_C_BEGIN
9 #include <ptscotch.h>
10 EXTERN_C_END
11 #endif
12 
/* Command-line-configurable state shared by the whole test. */
typedef struct {
  PetscBool compare_is;                   /* Compare ISs and PetscSections */
  PetscBool compare_dm;                   /* Compare DM */
  PetscBool tpw;                          /* Use target partition weights */
  char      partitioning[64];             /* partitioner type for the first pass (defaults to parmetis) */
  char      repartitioning[64];           /* partitioner type for the second pass; empty string disables repartitioning */
} AppCtx;
20 
21 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
22 {
23   PetscBool      repartition = PETSC_TRUE;
24 
25   PetscFunctionBegin;
26   options->compare_is = PETSC_FALSE;
27   options->compare_dm = PETSC_FALSE;
28 
29   PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
30   PetscCall(PetscOptionsBool("-compare_is", "Compare ISs and PetscSections?", FILENAME, options->compare_is, &options->compare_is, NULL));
31   PetscCall(PetscOptionsBool("-compare_dm", "Compare DMs?", FILENAME, options->compare_dm, &options->compare_dm, NULL));
32   PetscCall(PetscStrncpy(options->partitioning,MATPARTITIONINGPARMETIS,sizeof(options->partitioning)));
33   PetscCall(PetscOptionsString("-partitioning","The mat partitioning type to test","None",options->partitioning, options->partitioning,sizeof(options->partitioning),NULL));
34   PetscCall(PetscOptionsBool("-repartition", "Partition again after the first partition?", FILENAME, repartition, &repartition, NULL));
35   if (repartition) {
36     PetscCall(PetscStrncpy(options->repartitioning,MATPARTITIONINGPARMETIS,64));
37     PetscCall(PetscOptionsString("-repartitioning","The mat partitioning type to test (second partitioning)","None", options->repartitioning, options->repartitioning,sizeof(options->repartitioning),NULL));
38   } else {
39     options->repartitioning[0] = '\0';
40   }
41   PetscCall(PetscOptionsBool("-tpweight", "Use target partition weights", FILENAME, options->tpw, &options->tpw, NULL));
42   PetscOptionsEnd();
43   PetscFunctionReturn(0);
44 }
45 
46 static PetscErrorCode ScotchResetRandomSeed()
47 {
48   PetscFunctionBegin;
49 #if defined(PETSC_HAVE_PTSCOTCH)
50   SCOTCH_randomReset();
51 #endif
52   PetscFunctionReturn(0);
53 }
54 
/*
  Create a DMPLEX mesh on comm, configured entirely from command-line options
  (-dm_plex_* etc.).  Automatic distribution is disabled so the caller controls
  partitioning explicitly.  The user context is currently unused here.
*/
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscFunctionBegin;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  /* keep the mesh sequential; main() partitions/distributes it by hand */
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
65 
66 int main(int argc, char **argv)
67 {
68   MPI_Comm       comm;
69   DM             dm1, dm2, dmdist1, dmdist2;
70   DMPlexInterpolatedFlag interp;
71   MatPartitioning mp;
72   PetscPartitioner part1, part2;
73   AppCtx         user;
74   IS             is1=NULL, is2=NULL;
75   IS             is1g, is2g;
76   PetscSection   s1=NULL, s2=NULL, tpws = NULL;
77   PetscInt       i;
78   PetscBool      flg;
79   PetscMPIInt    size;
80 
81   PetscCall(PetscInitialize(&argc, &argv, NULL,help));
82   comm = PETSC_COMM_WORLD;
83   PetscCallMPI(MPI_Comm_size(comm,&size));
84   PetscCall(ProcessOptions(comm, &user));
85   PetscCall(CreateMesh(comm, &user, &dm1));
86   PetscCall(CreateMesh(comm, &user, &dm2));
87 
88   if (user.tpw) {
89     PetscCall(PetscSectionCreate(comm, &tpws));
90     PetscCall(PetscSectionSetChart(tpws, 0, size));
91     for (i=0;i<size;i++) {
92       PetscInt tdof = i%2 ? 2*i -1 : i+2;
93       PetscCall(PetscSectionSetDof(tpws, i, tdof));
94     }
95     if (size > 1) { /* test zero tpw entry */
96       PetscCall(PetscSectionSetDof(tpws, 0, 0));
97     }
98     PetscCall(PetscSectionSetUp(tpws));
99   }
100 
101   /* partition dm1 using PETSCPARTITIONERPARMETIS */
102   PetscCall(ScotchResetRandomSeed());
103   PetscCall(DMPlexGetPartitioner(dm1, &part1));
104   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1,"p1_"));
105   PetscCall(PetscPartitionerSetType(part1, user.partitioning));
106   PetscCall(PetscPartitionerSetFromOptions(part1));
107   PetscCall(PetscSectionCreate(comm, &s1));
108   PetscCall(PetscPartitionerDMPlexPartition(part1, dm1, tpws, s1, &is1));
109 
110   /* partition dm2 using PETSCPARTITIONERMATPARTITIONING with MATPARTITIONINGPARMETIS */
111   PetscCall(ScotchResetRandomSeed());
112   PetscCall(DMPlexGetPartitioner(dm2, &part2));
113   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2,"p2_"));
114   PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
115   PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
116   PetscCall(MatPartitioningSetType(mp, user.partitioning));
117   PetscCall(PetscPartitionerSetFromOptions(part2));
118   PetscCall(PetscSectionCreate(comm, &s2));
119   PetscCall(PetscPartitionerDMPlexPartition(part2, dm2, tpws, s2, &is2));
120 
121   PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
122   PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
123   PetscCall(ISViewFromOptions(is1g, NULL, "-seq_is1_view"));
124   PetscCall(ISViewFromOptions(is2g, NULL, "-seq_is2_view"));
125   /* compare the two ISs */
126   if (user.compare_is) {
127     PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
128     if (!flg) PetscCall(PetscPrintf(comm, "ISs are not equal with type %s with size %d.\n",user.partitioning,size));
129   }
130   PetscCall(ISDestroy(&is1g));
131   PetscCall(ISDestroy(&is2g));
132 
133   /* compare the two PetscSections */
134   PetscCall(PetscSectionViewFromOptions(s1, NULL, "-seq_s1_view"));
135   PetscCall(PetscSectionViewFromOptions(s2, NULL, "-seq_s2_view"));
136   if (user.compare_is) {
137     PetscCall(PetscSectionCompare(s1, s2, &flg));
138     if (!flg) PetscCall(PetscPrintf(comm, "PetscSections are not equal with %s with size %d.\n",user.partitioning,size));
139   }
140 
141   /* distribute both DMs */
142   PetscCall(ScotchResetRandomSeed());
143   PetscCall(DMPlexDistribute(dm1, 0, NULL, &dmdist1));
144   PetscCall(ScotchResetRandomSeed());
145   PetscCall(DMPlexDistribute(dm2, 0, NULL, &dmdist2));
146 
147   /* cleanup */
148   PetscCall(PetscSectionDestroy(&tpws));
149   PetscCall(PetscSectionDestroy(&s1));
150   PetscCall(PetscSectionDestroy(&s2));
151   PetscCall(ISDestroy(&is1));
152   PetscCall(ISDestroy(&is2));
153   PetscCall(DMDestroy(&dm1));
154   PetscCall(DMDestroy(&dm2));
155 
156   /* if distributed DMs are NULL (sequential case), then quit */
157   if (!dmdist1 && !dmdist2) return 0;
158 
159   PetscCall(DMViewFromOptions(dmdist1, NULL, "-dm_dist1_view"));
160   PetscCall(DMViewFromOptions(dmdist2, NULL, "-dm_dist2_view"));
161 
162   /* compare the two distributed DMs */
163   if (user.compare_dm) {
164     PetscCall(DMPlexEqual(dmdist1, dmdist2, &flg));
165     if (!flg) PetscCall(PetscPrintf(comm, "Distributed DMs are not equal %s with size %d.\n",user.partitioning,size));
166   }
167 
168   /* if repartitioning is disabled, then quit */
169   if (user.repartitioning[0] == '\0') return 0;
170 
171   if (user.tpw) {
172     PetscCall(PetscSectionCreate(comm, &tpws));
173     PetscCall(PetscSectionSetChart(tpws, 0, size));
174     for (i=0;i<size;i++) {
175       PetscInt tdof = i%2 ? i+1 : size - i;
176       PetscCall(PetscSectionSetDof(tpws, i, tdof));
177     }
178     PetscCall(PetscSectionSetUp(tpws));
179   }
180 
181   /* repartition distributed DM dmdist1 */
182   PetscCall(ScotchResetRandomSeed());
183   PetscCall(DMPlexGetPartitioner(dmdist1, &part1));
184   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1,"dp1_"));
185   PetscCall(PetscPartitionerSetType(part1, user.repartitioning));
186   PetscCall(PetscPartitionerSetFromOptions(part1));
187   PetscCall(PetscSectionCreate(comm, &s1));
188   PetscCall(PetscPartitionerDMPlexPartition(part1, dmdist1, tpws, s1, &is1));
189 
190   /* repartition distributed DM dmdist2 */
191   PetscCall(ScotchResetRandomSeed());
192   PetscCall(DMPlexGetPartitioner(dmdist2, &part2));
193   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2,"dp2_"));
194   PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
195   PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
196   PetscCall(MatPartitioningSetType(mp, user.repartitioning));
197   PetscCall(PetscPartitionerSetFromOptions(part2));
198   PetscCall(PetscSectionCreate(comm, &s2));
199   PetscCall(PetscPartitionerDMPlexPartition(part2, dmdist2, tpws, s2, &is2));
200 
201   /* compare the two ISs */
202   PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
203   PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
204   PetscCall(ISViewFromOptions(is1g, NULL, "-dist_is1_view"));
205   PetscCall(ISViewFromOptions(is2g, NULL, "-dist_is2_view"));
206   if (user.compare_is) {
207     PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
208     if (!flg) PetscCall(PetscPrintf(comm, "Distributed ISs are not equal, with %s with size %d.\n",user.repartitioning,size));
209   }
210   PetscCall(ISDestroy(&is1g));
211   PetscCall(ISDestroy(&is2g));
212 
213   /* compare the two PetscSections */
214   PetscCall(PetscSectionViewFromOptions(s1, NULL, "-dist_s1_view"));
215   PetscCall(PetscSectionViewFromOptions(s2, NULL, "-dist_s2_view"));
216   if (user.compare_is) {
217     PetscCall(PetscSectionCompare(s1, s2, &flg));
218     if (!flg) PetscCall(PetscPrintf(comm, "Distributed PetscSections are not equal, with %s with size %d.\n",user.repartitioning,size));
219   }
220 
221   /* redistribute both distributed DMs */
222   PetscCall(ScotchResetRandomSeed());
223   PetscCall(DMPlexDistribute(dmdist1, 0, NULL, &dm1));
224   PetscCall(ScotchResetRandomSeed());
225   PetscCall(DMPlexDistribute(dmdist2, 0, NULL, &dm2));
226 
227   /* compare the two distributed DMs */
228   PetscCall(DMPlexIsInterpolated(dm1, &interp));
229   if (interp == DMPLEX_INTERPOLATED_NONE) {
230     PetscCall(DMPlexEqual(dm1, dm2, &flg));
231     if (!flg) PetscCall(PetscPrintf(comm, "Redistributed DMs are not equal, with %s with size %d.\n",user.repartitioning,size));
232   }
233 
234   /* cleanup */
235   PetscCall(PetscSectionDestroy(&tpws));
236   PetscCall(PetscSectionDestroy(&s1));
237   PetscCall(PetscSectionDestroy(&s2));
238   PetscCall(ISDestroy(&is1));
239   PetscCall(ISDestroy(&is2));
240   PetscCall(DMDestroy(&dm1));
241   PetscCall(DMDestroy(&dm2));
242   PetscCall(DMDestroy(&dmdist1));
243   PetscCall(DMDestroy(&dmdist2));
244   PetscCall(PetscFinalize());
245   return 0;
246 }
247 
248 /*TEST
249 
250   test:
251     # partition sequential mesh loaded from Exodus file
252     suffix: 0
253     nsize: {{1 2 3 4 8}}
254     requires: chaco parmetis ptscotch exodusii
255     args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
256     args: -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}
257   test:
258     # repartition mesh already partitioned naively by MED loader
259     suffix: 1
260     nsize: {{1 2 3 4 8}}
261     TODO: MED
262     requires: parmetis ptscotch med
263     args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med
264     args: -repartition 0 -partitioning {{parmetis ptscotch}}
265   test:
266     # partition mesh generated by ctetgen using scotch, then repartition with scotch, diff view
267     suffix: 3
268     nsize: 4
269     requires: ptscotch ctetgen
270     args: -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 -partitioning ptscotch -repartitioning ptscotch
271     args: -p1_petscpartitioner_view -p2_petscpartitioner_view -dp1_petscpartitioner_view -dp2_petscpartitioner_view -tpweight {{0 1}}
272   test:
273     # partition mesh generated by ctetgen using partitioners supported both by MatPartitioning and PetscPartitioner
274     suffix: 4
275     nsize: {{1 2 3 4 8}}
276     requires: chaco parmetis ptscotch ctetgen
277     args: -dm_plex_dim 3 -dm_plex_box_faces {{2,3,4  5,4,3  7,11,5}} -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}
278 
279 TEST*/
280