xref: /petsc/src/snes/tests/ex13.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 static char help[] = "Benchmark Poisson Problem in 2d and 3d with finite elements.\n\
2 We solve the Poisson problem in a rectangular domain\n\
3 using a parallel unstructured mesh (DMPLEX) to discretize it.\n\n\n";
4 
5 #include <petscdmplex.h>
6 #include <petscsnes.h>
7 #include <petscds.h>
8 #include <petscconvest.h>
9 
/* Application parameters, set from the command line in ProcessOptions() */
typedef struct {
  PetscInt  nit;    /* Number of benchmark iterations */
  PetscBool strong; /* Do not integrate the Laplacian by parts */
} AppCtx;
14 
15 static PetscErrorCode trig_u(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx)
16 {
17   PetscInt d;
18   *u = 0.0;
19   for (d = 0; d < dim; ++d) *u += PetscSinReal(2.0*PETSC_PI*x[d]);
20   return 0;
21 }
22 
23 static void f0_trig_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
24                       const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
25                       const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
26                       PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
27 {
28   PetscInt d;
29   for (d = 0; d < dim; ++d) f0[0] += -4.0*PetscSqr(PETSC_PI)*PetscSinReal(2.0*PETSC_PI*x[d]);
30 }
31 
32 static void f1_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
33                  const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
34                  const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
35                  PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
36 {
37   PetscInt d;
38   for (d = 0; d < dim; ++d) f1[d] = u_x[d];
39 }
40 
41 static void g3_uu(PetscInt dim, PetscInt Nf, PetscInt NfAux,
42                   const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
43                   const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
44                   PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
45 {
46   PetscInt d;
47   for (d = 0; d < dim; ++d) g3[d*dim+d] = 1.0;
48 }
49 
50 static PetscErrorCode quadratic_u(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *u, void *ctx)
51 {
52   *u = PetscSqr(x[0]) + PetscSqr(x[1]);
53   return 0;
54 }
55 
56 static void f0_strong_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
57                         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
58                         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
59                         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
60 {
61   PetscInt d;
62   for (d = 0; d < dim; ++d) f0[0] -= u_x[dim + d*dim+d];
63   f0[0] += 4.0;
64 }
65 
/* Set defaults and read command-line overrides into the AppCtx.
   Options: -benchmark_it (iteration count, default 10), -strong (strong-form residual, default false). */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  options->nit    = 10;
  options->strong = PETSC_FALSE;
  /* PetscOptionsBegin/End are paired macros; the explicit ierr form is kept deliberately */
  ierr = PetscOptionsBegin(comm, "", "Poisson Problem Options", "DMPLEX");PetscCall(ierr);
  PetscCall(PetscOptionsInt("-benchmark_it", "Solve the benchmark problem this many times", "ex13.c", options->nit, &options->nit, NULL));
  PetscCall(PetscOptionsBool("-strong", "Do not integrate the Laplacian by parts", "ex13.c", options->strong, &options->strong, NULL));
  ierr = PetscOptionsEnd();PetscCall(ierr);
  PetscFunctionReturn(0);
}
79 
/* Create the DMPLEX mesh entirely from command-line options (-dm_plex_*, -dm_refine, ...),
   attach the user context, and optionally view it with -dm_view. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscFunctionBeginUser;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMSetApplicationContext(*dm, user));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
90 
91 static PetscErrorCode SetupPrimalProblem(DM dm, AppCtx *user)
92 {
93   PetscDS        ds;
94   DMLabel        label;
95   const PetscInt id = 1;
96 
97   PetscFunctionBeginUser;
98   PetscCall(DMGetDS(dm, &ds));
99   PetscCall(DMGetLabel(dm, "marker", &label));
100   if (user->strong) {
101     PetscCall(PetscDSSetResidual(ds, 0, f0_strong_u, NULL));
102     PetscCall(PetscDSSetExactSolution(ds, 0, quadratic_u, user));
103     PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void)) quadratic_u, NULL, user, NULL));
104   } else {
105     PetscCall(PetscDSSetResidual(ds, 0, f0_trig_u, f1_u));
106     PetscCall(PetscDSSetJacobian(ds, 0, 0, NULL, NULL, NULL, g3_uu));
107     PetscCall(PetscDSSetExactSolution(ds, 0, trig_u, user));
108     PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void)) trig_u, NULL, user, NULL));
109   }
110   PetscFunctionReturn(0);
111 }
112 
113 static PetscErrorCode SetupDiscretization(DM dm, const char name[], PetscErrorCode (*setup)(DM, AppCtx *), AppCtx *user)
114 {
115   DM             cdm = dm;
116   PetscFE        fe;
117   DMPolytopeType ct;
118   PetscBool      simplex;
119   PetscInt       dim, cStart;
120   char           prefix[PETSC_MAX_PATH_LEN];
121 
122   PetscFunctionBeginUser;
123   PetscCall(DMGetDimension(dm, &dim));
124   PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL));
125   PetscCall(DMPlexGetCellType(dm, cStart, &ct));
126   simplex = DMPolytopeTypeGetNumVertices(ct) == DMPolytopeTypeGetDim(ct)+1 ? PETSC_TRUE : PETSC_FALSE;
127   /* Create finite element */
128   PetscCall(PetscSNPrintf(prefix, PETSC_MAX_PATH_LEN, "%s_", name));
129   PetscCall(PetscFECreateDefault(PETSC_COMM_SELF, dim, 1, simplex, name ? prefix : NULL, -1, &fe));
130   PetscCall(PetscObjectSetName((PetscObject) fe, name));
131   /* Set discretization and boundary conditions for each mesh */
132   PetscCall(DMSetField(dm, 0, NULL, (PetscObject) fe));
133   PetscCall(DMCreateDS(dm));
134   PetscCall((*setup)(dm, user));
135   while (cdm) {
136     PetscCall(DMCopyDisc(dm,cdm));
137     /* TODO: Check whether the boundary of coarse meshes is marked */
138     PetscCall(DMGetCoarseDM(cdm, &cdm));
139   }
140   PetscCall(PetscFEDestroy(&fe));
141   PetscFunctionReturn(0);
142 }
143 
int main(int argc, char **argv)
{
  DM             dm;   /* Problem specification */
  SNES           snes; /* Nonlinear solver */
  Vec            u;    /* Solutions */
  AppCtx         user; /* User-defined work context */

  PetscCall(PetscInitialize(&argc, &argv, NULL,help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  /* Primal system: mesh, discretization, and one full SNES solve (assembles the operator) */
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(SNESSetDM(snes, dm));
  PetscCall(SetupDiscretization(dm, "potential", SetupPrimalProblem, &user));
  PetscCall(DMCreateGlobalVector(dm, &u));
  PetscCall(VecSet(u, 0.0));
  PetscCall(PetscObjectSetName((PetscObject) u, "potential"));
  PetscCall(DMPlexSetSNESLocalFEM(dm, &user, &user, &user));
  PetscCall(SNESSetFromOptions(snes));
  PetscCall(DMSNESCheckFromOptions(snes, u));
  PetscCall(SNESSolve(snes, NULL, u));
  /* Benchmark system: repeat only the linear solve nit times, with PCSetUp and
     KSPSolve timed in separate log stages so -log_view separates setup from solve */
  if (user.nit) {
#if defined(PETSC_USE_LOG)
    PetscLogStage kspstage,pcstage;
#endif
    KSP       ksp;
    PC        pc;
    Vec       b;
    PetscInt  i;
    PetscLogDouble time;
    /* suppress per-iteration output so repeated solves do not flood stdout */
    PetscCall(PetscOptionsClearValue(NULL,"-ksp_monitor"));
    PetscCall(PetscOptionsClearValue(NULL,"-ksp_view"));
    PetscCall(SNESGetKSP(snes, &ksp));
    PetscCall(SNESGetSolution(snes, &u));
    PetscCall(KSPSetFromOptions(ksp));
    PetscCall(VecSet(u, 0.0));
    /* reuse the residual from the SNES solve as the benchmark right-hand side */
    PetscCall(SNESGetFunction(snes, &b, NULL, NULL));
    PetscCall(KSPGetPC(ksp, &pc));
    /* time preconditioner setup in its own stage, outside the solve stage */
    PetscCall(PetscLogStageRegister("PCSetUp", &pcstage));
    PetscCall(PetscLogStagePush(pcstage));
    PetscCall(PCSetUp(pc));
    PetscCall(PetscLogStagePop());
    PetscCall(PetscLogStageRegister("KSP Solve only", &kspstage));
    PetscCall(PetscTime(&time));
    PetscCall(PetscLogStagePush(kspstage));
    for (i=0;i<user.nit;i++) {
      /* reset the initial guess so every benchmark solve does identical work */
      PetscCall(VecZeroEntries(u));
      PetscCall(KSPSolve(ksp, b, u));
    }
    PetscCall(PetscLogStagePop());
    PetscCall(PetscTimeSubtract(&time));
    // ierr = PetscPrintf(PETSC_COMM_WORLD,"Solve time: %g\n",-time); // breaks CI
  }
  PetscCall(SNESGetSolution(snes, &u));
  PetscCall(VecViewFromOptions(u, NULL, "-potential_view"));
  /* Cleanup */
  PetscCall(VecDestroy(&u));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}
207 
208 /*TEST
209 
210   test:
211     suffix: strong
212     requires: triangle
213     args: -dm_plex_dim 2 -dm_refine 1 -benchmark_it 0 -dmsnes_check \
214           -potential_petscspace_degree 2 -dm_ds_jet_degree 2 -strong
215 
216   test:
217     suffix: bench
218     nsize: 4
219     args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,8 -dm_refine 1 \
220           -petscpartitioner_type simple -petscpartitioner_simple_process_grid 1,1,2 -petscpartitioner_simple_node_grid 1,1,2 \
221           -potential_petscspace_degree 2 -ksp_type cg -pc_type gamg -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -benchmark_it 1 -dm_view -snes_rtol 1.e-4
222 
223   test:
224     suffix: comparison
225     nsize: 4
226     args: -dm_plex_dim 2 -dm_plex_box_faces 4,4 -dm_refine 3 -petscpartitioner_simple_process_grid 2,2 \
227       -petscpartitioner_simple_node_grid 1,1 -potential_petscspace_degree 2 -petscpartitioner_type simple \
228       -dm_plex_simplex 0 -snes_monitor_short -snes_type ksponly -dm_view -pc_type gamg -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_process_eq_limit 400 -ksp_norm_type unpreconditioned \
229       -pc_gamg_coarse_eq_limit 10 -snes_converged_reason -ksp_converged_reason -snes_rtol 1.e-4
230 
231   test:
232     suffix: cuda
233     nsize: 4
234     requires: cuda
235     output_file: output/ex13_comparison.out
236     args: -dm_plex_dim 2 -dm_plex_box_faces 4,4 -dm_refine 3 -petscpartitioner_simple_process_grid 2,2 \
237       -petscpartitioner_simple_node_grid 1,1 -potential_petscspace_degree 2 -petscpartitioner_type simple \
238       -dm_plex_simplex 0 -snes_monitor_short -snes_type ksponly -dm_view -pc_type gamg -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_process_eq_limit 400 -ksp_norm_type unpreconditioned \
239       -pc_gamg_coarse_eq_limit 10 -snes_converged_reason -ksp_converged_reason -snes_rtol 1.e-4 -dm_mat_type aijcusparse -dm_vec_type cuda
240 
241   test:
242     suffix: kokkos_comp
243     nsize: 4
244     requires: !sycl kokkos_kernels
245     output_file: output/ex13_comparison.out
246     args: -dm_plex_dim 2 -dm_plex_box_faces 4,4 -dm_refine 3 -petscpartitioner_simple_process_grid 2,2 \
247       -petscpartitioner_simple_node_grid 1,1 -potential_petscspace_degree 2 -petscpartitioner_type simple \
248       -dm_plex_simplex 0 -snes_monitor_short -snes_type ksponly -dm_view -pc_type gamg -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_process_eq_limit 400 -ksp_norm_type unpreconditioned \
249       -pc_gamg_coarse_eq_limit 10 -snes_converged_reason -ksp_converged_reason -snes_rtol 1.e-4 -dm_mat_type aijkokkos -dm_vec_type kokkos
250 
251   test:
252     nsize: 4
253     requires: !sycl kokkos_kernels
254     suffix: kokkos
255     args: -dm_plex_dim 2 -dm_plex_box_faces 2,8 -petscpartitioner_type simple -petscpartitioner_simple_process_grid 2,1 \
256           -petscpartitioner_simple_node_grid 2,1 -dm_plex_simplex 0 -potential_petscspace_degree 1 -dm_refine 1 -ksp_type cg -pc_type gamg -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -ksp_norm_type unpreconditioned \
257           -pc_gamg_esteig_ksp_type cg -ksp_converged_reason -snes_monitor_short -snes_rtol 1.e-4 -dm_view -dm_mat_type aijkokkos -dm_vec_type kokkos
258 
259   test:
260     suffix: aijmkl_comp
261     nsize: 4
262     requires: mkl_sparse
263     output_file: output/ex13_comparison.out
264     args: -dm_plex_dim 2 -dm_plex_box_faces 4,4 -dm_refine 3 -petscpartitioner_simple_process_grid 2,2 \
265       -petscpartitioner_simple_node_grid 1,1 -potential_petscspace_degree 2 -petscpartitioner_type simple \
266       -dm_plex_simplex 0 -snes_monitor_short -snes_type ksponly -dm_view -pc_type gamg -pc_gamg_esteig_ksp_type cg -pc_gamg_esteig_ksp_max_it 10 -pc_gamg_process_eq_limit 400 -ksp_norm_type unpreconditioned \
267       -pc_gamg_coarse_eq_limit 10 -snes_converged_reason -ksp_converged_reason -snes_rtol 1.e-4 -dm_mat_type aijmkl
268 
269   test:
270     suffix: aijmkl_seq
271     nsize: 1
272     requires: mkl_sparse
273     TODO: broken (INDEFINITE PC)
274     args: -dm_plex_dim 3 -dm_plex_box_faces 4,4,4 -dm_refine 1 -petscpartitioner_type simple -potential_petscspace_degree 1 -dm_plex_simplex 0 -snes_monitor_short \
275           -snes_type ksponly -dm_view -pc_type gamg -pc_gamg_sym_graph 0 -pc_gamg_threshold -1 -pc_gamg_square_graph 10 -pc_gamg_process_eq_limit 400 \
276           -pc_gamg_reuse_interpolation -pc_gamg_coarse_eq_limit 10 -pc_gamg_esteig_ksp_type cg -ksp_type cg -ksp_norm_type unpreconditioned -snes_converged_reason \
277           -ksp_converged_reason -snes_rtol 1.e-4 -dm_mat_type aijmkl -dm_vec_type standard
278 
279 TEST*/
280