// Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
// All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
//
// SPDX-License-Identifier: BSD-2-Clause
//
// This file is part of CEED:  http://github.com/ceed

//                        libCEED + PETSc Example: CEED BPs
//
// This example demonstrates a simple usage of libCEED with PETSc to solve the CEED BP benchmark problems (see http://ceed.exascaleproject.org/bps) on
// a closed surface, such as the surface of a discretized sphere.
//
// The code uses higher level communication protocols in DMPlex.
//
// Build with:
//
//     make bpssphere [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
//
// Sample runs:
//
//     bpssphere -problem bp1 -degree 3
//     bpssphere -problem bp2 -degree 3
//     bpssphere -problem bp3 -degree 3
//     bpssphere -problem bp4 -degree 3
//     bpssphere -problem bp5 -degree 3 -ceed /cpu/self
//     bpssphere -problem bp6 -degree 3 -ceed /gpu/cuda
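//
// Illustrative runs using the additional options defined below (-simplex, -dm_refine, -benchmark):
//
//     bpssphere -problem bp3 -degree 3 -simplex -dm_refine 2
//     bpssphere -problem bp1 -degree 3 -benchmark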
//
//TESTARGS -ceed {ceed_resource} -test -problem bp3 -degree 3 -dm_refine 2

/// @file
/// CEED BPs example using PETSc with DMPlex
/// See bpsraw.c for a "raw" implementation using a structured grid and bps.c for an implementation using an unstructured grid.
static const char help[] = "Solve CEED BPs on a sphere using DMPlex in PETSc\n";

#include "bpssphere.h"

#include <ceed.h>
#include <petsc.h>
#include <petscdmplex.h>
#include <petscksp.h>
#include <stdbool.h>
#include <string.h>

#include "include/libceedsetup.h"
#include "include/matops.h"
#include "include/petscutils.h"
#include "include/petscversion.h"
#include "include/sphereproblemdata.h"

#if PETSC_VERSION_LT(3, 12, 0)
#ifdef PETSC_HAVE_CUDA
#include <petsccuda.h>
// Note: With PETSc prior to version 3.12.0, the include path for 'cublas_v2.h' must be provided in order to use 'petsccuda.h'.
#endif
#endif

int main(int argc, char **argv) {
  MPI_Comm             comm;
  char                 ceed_resource[PETSC_MAX_PATH_LEN] = "/cpu/self", filename[PETSC_MAX_PATH_LEN];
  double               my_rt_start, my_rt, rt_min, rt_max;
  PetscInt             degree = 3, q_extra, l_size, g_size, topo_dim = 2, num_comp_x = 3, num_comp_u = 1, xl_size;
  PetscScalar         *r;
  PetscBool            test_mode, benchmark_mode, read_mesh, write_solution, simplex;
  PetscLogStage        solve_stage;
  Vec                  X, X_loc, rhs, rhs_loc;
  Mat                  mat_O;
  KSP                  ksp;
  DM                   dm;
  OperatorApplyContext op_apply_ctx, op_error_ctx;
  Ceed                 ceed;
  CeedData             ceed_data;
  CeedQFunction        qf_error;
  CeedOperator         op_error;
  CeedVector           rhs_ceed, target;
  BPType               bp_choice;
  VecType              vec_type;
  PetscMemType         mem_type;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;

  // Read command line options
  PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL);
  bp_choice = CEED_BP1;
  PetscCall(PetscOptionsEnum("-problem", "CEED benchmark problem to solve", NULL, bp_types, (PetscEnum)bp_choice, (PetscEnum *)&bp_choice, NULL));
  num_comp_u = bp_options[bp_choice].num_comp_u;
  test_mode  = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-test", "Testing mode (do not print unless error is large)", NULL, test_mode, &test_mode, NULL));
  benchmark_mode = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-benchmark", "Benchmarking mode (prints benchmark statistics)", NULL, benchmark_mode, &benchmark_mode, NULL));
  write_solution = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-write_solution", "Write solution for visualization", NULL, write_solution, &write_solution, NULL));
  degree = test_mode ? 3 : 2;
  PetscCall(PetscOptionsInt("-degree", "Polynomial degree of tensor product basis", NULL, degree, &degree, NULL));
  q_extra = bp_options[bp_choice].q_extra;
  PetscCall(PetscOptionsInt("-q_extra", "Number of extra quadrature points", NULL, q_extra, &q_extra, NULL));
  PetscCall(PetscOptionsString("-ceed", "CEED resource specifier", NULL, ceed_resource, ceed_resource, sizeof(ceed_resource), NULL));
  read_mesh = PETSC_FALSE;
  PetscCall(PetscOptionsString("-mesh", "Read mesh from file", NULL, filename, filename, sizeof(filename), &read_mesh));
  simplex = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-simplex", "Use simplices, or tensor product cells", NULL, simplex, &simplex, NULL));
  PetscOptionsEnd();

  // Setup DM
  if (read_mesh) {
    PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, filename, NULL, PETSC_TRUE, &dm));
  } else {
    // Create the mesh as a 0-refined sphere.
    // This will create a cubic surface, not a box, and will snap to the unit sphere upon refinement.
    PetscCall(DMPlexCreateSphereMesh(PETSC_COMM_WORLD, topo_dim, simplex, 1., &dm));
    // Set the object name
    PetscCall(PetscObjectSetName((PetscObject)dm, "Sphere"));
    // Refine DMPlex with uniform refinement using runtime option -dm_refine
    PetscCall(DMPlexSetRefinementUniform(dm, PETSC_TRUE));
  }
  PetscCall(DMSetFromOptions(dm));
  // View DMPlex via runtime option
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  // Set up the finite element space on the DM for the chosen polynomial degree
  PetscCall(SetupDMByDegree(dm, degree, q_extra, num_comp_u, topo_dim, false));

  // Create vectors
  PetscCall(DMCreateGlobalVector(dm, &X));
  PetscCall(VecGetLocalSize(X, &l_size));
  PetscCall(VecGetSize(X, &g_size));
  PetscCall(DMCreateLocalVector(dm, &X_loc));
  PetscCall(VecGetSize(X_loc, &xl_size));
  PetscCall(VecDuplicate(X, &rhs));

  // Operator
  PetscCall(PetscMalloc1(1, &op_apply_ctx));
  PetscCall(PetscMalloc1(1, &op_error_ctx));
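  // mat_O is a matrix-free shell operator: its MatMult is provided by MatMult_Ceed (declared in
  // include/matops.h), which is expected to apply the libCEED operator through op_apply_ctx set up below.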
  PetscCall(MatCreateShell(comm, l_size, l_size, g_size, g_size, op_apply_ctx, &mat_O));
  PetscCall(MatShellSetOperation(mat_O, MATOP_MULT, (void (*)(void))MatMult_Ceed));

  // Set up libCEED
  CeedInit(ceed_resource, &ceed);
  CeedMemType mem_type_backend;
  CeedGetPreferredMemType(ceed, &mem_type_backend);

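  // Match the DM's vector type to the libCEED backend's preferred memory type so that, for device
  // backends, PETSc vector arrays can be handed to libCEED below without host/device copies.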
  PetscCall(DMGetVecType(dm, &vec_type));
  if (!vec_type) {  // Not yet set by user -dm_vec_type
    switch (mem_type_backend) {
      case CEED_MEM_HOST:
        vec_type = VECSTANDARD;
        break;
      case CEED_MEM_DEVICE: {
        const char *resolved;
        CeedGetResource(ceed, &resolved);
        if (strstr(resolved, "/gpu/cuda")) vec_type = VECCUDA;
        else if (strstr(resolved, "/gpu/hip/occa")) vec_type = VECSTANDARD;  // https://github.com/CEED/libCEED/issues/678
        else if (strstr(resolved, "/gpu/hip")) vec_type = VECHIP;
        else vec_type = VECSTANDARD;
      }
    }
    PetscCall(DMSetVecType(dm, vec_type));
  }

  // Print summary
  if (!test_mode) {
    PetscInt    P = degree + 1, Q = P + q_extra;
    const char *used_resource;
    CeedGetResource(ceed, &used_resource);
    PetscCall(PetscPrintf(comm,
                          "\n-- CEED Benchmark Problem %" CeedInt_FMT " on the Sphere -- libCEED + PETSc --\n"
                          "  libCEED:\n"
                          "    libCEED Backend                         : %s\n"
                          "    libCEED Backend MemType                 : %s\n"
                          "  Mesh:\n"
                          "    Solution Order (P)                      : %" CeedInt_FMT "\n"
                          "    Quadrature  Order (Q)                   : %" CeedInt_FMT "\n"
                          "    Additional quadrature points (q_extra)  : %" CeedInt_FMT "\n"
                          "    Global nodes                            : %" PetscInt_FMT "\n",
                          bp_choice + 1, used_resource, CeedMemTypes[mem_type_backend], P, Q, q_extra, g_size / num_comp_u));
  }

  // Create RHS vector
  PetscCall(VecDuplicate(X_loc, &rhs_loc));
  PetscCall(VecZeroEntries(rhs_loc));
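  // Wrap the local RHS array in a CeedVector (zero-copy via CEED_USE_POINTER) so the libCEED setup
  // below can write the right-hand side directly into rhs_loc.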
  PetscCall(VecGetArrayAndMemType(rhs_loc, &r, &mem_type));
  CeedVectorCreate(ceed, xl_size, &rhs_ceed);
  CeedVectorSetArray(rhs_ceed, MemTypeP2C(mem_type), CEED_USE_POINTER, r);

  // Setup libCEED's objects
  PetscCall(PetscMalloc1(1, &ceed_data));
  PetscCall(SetupLibceedByDegree(dm, ceed, degree, topo_dim, q_extra, num_comp_x, num_comp_u, g_size, xl_size, bp_options[bp_choice], ceed_data, true,
                                 rhs_ceed, &target));

  // Gather RHS
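  // Return ownership of the array to PETSc, then sum the local element contributions into the
  // global RHS vector with ADD_VALUES.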
  CeedVectorTakeArray(rhs_ceed, MemTypeP2C(mem_type), NULL);
  PetscCall(VecRestoreArrayAndMemType(rhs_loc, &r));
  PetscCall(VecZeroEntries(rhs));
  PetscCall(DMLocalToGlobal(dm, rhs_loc, ADD_VALUES, rhs));
  CeedVectorDestroy(&rhs_ceed);

  // Create the error Q-function
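  // The error QFunction compares the computed solution u against the exact solution values stored in
  // target ("true_soln") during setup; the resulting operator is used by ComputeL2Error (via op_error_ctx) below.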
  CeedQFunctionCreateInterior(ceed, 1, bp_options[bp_choice].error, bp_options[bp_choice].error_loc, &qf_error);
  CeedQFunctionAddInput(qf_error, "u", num_comp_u, CEED_EVAL_INTERP);
  CeedQFunctionAddInput(qf_error, "true_soln", num_comp_u, CEED_EVAL_NONE);
  CeedQFunctionAddInput(qf_error, "qdata", ceed_data->q_data_size, CEED_EVAL_NONE);
  CeedQFunctionAddOutput(qf_error, "error", num_comp_u, CEED_EVAL_INTERP);

  // Create the error operator
  CeedOperatorCreate(ceed, qf_error, NULL, NULL, &op_error);
  CeedOperatorSetField(op_error, "u", ceed_data->elem_restr_u, ceed_data->basis_u, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_error, "true_soln", ceed_data->elem_restr_u_i, CEED_BASIS_COLLOCATED, target);
  CeedOperatorSetField(op_error, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_COLLOCATED, ceed_data->q_data);
  CeedOperatorSetField(op_error, "error", ceed_data->elem_restr_u, ceed_data->basis_u, CEED_VECTOR_ACTIVE);

  // Set up apply operator context
  PetscCall(SetupApplyOperatorCtx(comm, dm, ceed, ceed_data, X_loc, op_apply_ctx));

  // Setup solver
  PetscCall(KSPCreate(comm, &ksp));
  {
    PC pc;
    PetscCall(KSPGetPC(ksp, &pc));
    if (bp_choice == CEED_BP1 || bp_choice == CEED_BP2) {
      PetscCall(PCSetType(pc, PCJACOBI));
      PetscCall(PCJacobiSetType(pc, PC_JACOBI_ROWSUM));
    } else {
      PetscCall(PCSetType(pc, PCNONE));
      MatNullSpace nullspace;

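      // The Poisson-type BPs (BP3-BP6) on a closed surface have a nontrivial null space (the constant
      // functions), so attach it to the shell operator so the Krylov solver can project it out.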
      PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
      PetscCall(MatSetNullSpace(mat_O, nullspace));
      PetscCall(MatNullSpaceDestroy(&nullspace));
    }
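    // CG with the natural norm: convergence is monitored in the norm induced by the preconditioner,
    // which the CG iteration already computes, avoiding an extra reduction per iteration.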
    PetscCall(KSPSetType(ksp, KSPCG));
    PetscCall(KSPSetNormType(ksp, KSP_NORM_NATURAL));
    PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  }
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSetOperators(ksp, mat_O, mat_O));

  // First run, if benchmarking
  if (benchmark_mode) {
    PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1));
    my_rt_start = MPI_Wtime();
    PetscCall(KSPSolve(ksp, rhs, X));
    my_rt = MPI_Wtime() - my_rt_start;
    PetscCall(MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm));
    // Set maxits based on first iteration timing
    if (my_rt > 0.02) {
      PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 5));
    } else {
      PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20));
    }
  }

  // Timed solve
  PetscCall(VecZeroEntries(X));
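  // Synchronize all ranks before starting the timer so the measured solve time is comparable across processes.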
  PetscCall(PetscBarrier((PetscObject)ksp));

  // -- Performance logging
  PetscCall(PetscLogStageRegister("Solve Stage", &solve_stage));
  PetscCall(PetscLogStagePush(solve_stage));

  // -- Solve
  my_rt_start = MPI_Wtime();
  PetscCall(KSPSolve(ksp, rhs, X));
  my_rt = MPI_Wtime() - my_rt_start;

  // -- Performance logging
  PetscCall(PetscLogStagePop());

  // Output results
  {
    KSPType            ksp_type;
    KSPConvergedReason reason;
    PetscReal          rnorm;
    PetscInt           its;
    PetscCall(KSPGetType(ksp, &ksp_type));
    PetscCall(KSPGetConvergedReason(ksp, &reason));
    PetscCall(KSPGetIterationNumber(ksp, &its));
    PetscCall(KSPGetResidualNorm(ksp, &rnorm));
    if (!test_mode || reason < 0 || rnorm > 1e-8) {
      PetscCall(PetscPrintf(comm,
                            "  KSP:\n"
                            "    KSP Type                                : %s\n"
                            "    KSP Convergence                         : %s\n"
                            "    Total KSP Iterations                    : %" PetscInt_FMT "\n"
                            "    Final rnorm                             : %e\n",
                            ksp_type, KSPConvergedReasons[reason], its, (double)rnorm));
    }
    if (!test_mode) {
      PetscCall(PetscPrintf(comm, "  Performance:\n"));
    }
    {
      // Set up error operator context
      PetscCall(SetupErrorOperatorCtx(comm, dm, ceed, ceed_data, X_loc, op_error, op_error_ctx));
      PetscScalar l2_error;
      PetscCall(ComputeL2Error(X, &l2_error, op_error_ctx));
      PetscReal tol = 5e-4;
      if (!test_mode || l2_error > tol) {
        PetscCall(MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm));
        PetscCall(MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm));
        PetscCall(PetscPrintf(comm,
                              "    L2 Error                                : %e\n"
                              "    CG Solve Time                           : %g (%g) sec\n",
                              (double)l2_error, rt_max, rt_min));
      }
    }
    if (benchmark_mode && (!test_mode)) {
      PetscCall(PetscPrintf(comm, "    DoFs/Sec in CG                            : %g (%g) million\n", 1e-6 * g_size * its / rt_max,
                            1e-6 * g_size * its / rt_min));
    }
  }

  // Output solution
  if (write_solution) {
    PetscViewer vtk_viewer_soln;

    PetscCall(PetscViewerCreate(comm, &vtk_viewer_soln));
    PetscCall(PetscViewerSetType(vtk_viewer_soln, PETSCVIEWERVTK));
    PetscCall(PetscViewerFileSetName(vtk_viewer_soln, "solution.vtu"));
    PetscCall(VecView(X, vtk_viewer_soln));
    PetscCall(PetscViewerDestroy(&vtk_viewer_soln));
  }

  // Cleanup
  PetscCall(VecDestroy(&X));
  PetscCall(VecDestroy(&X_loc));
  PetscCall(VecDestroy(&op_apply_ctx->Y_loc));
  PetscCall(VecDestroy(&op_error_ctx->Y_loc));
  PetscCall(MatDestroy(&mat_O));
  PetscCall(PetscFree(op_apply_ctx));
  PetscCall(PetscFree(op_error_ctx));
  PetscCall(CeedDataDestroy(0, ceed_data));
  PetscCall(DMDestroy(&dm));

  PetscCall(VecDestroy(&rhs));
  PetscCall(VecDestroy(&rhs_loc));
  PetscCall(KSPDestroy(&ksp));
  CeedVectorDestroy(&target);
  CeedQFunctionDestroy(&qf_error);
  CeedOperatorDestroy(&op_error);
  CeedDestroy(&ceed);
  return PetscFinalize();
}