xref: /libCEED/examples/petsc/bpssphere.c (revision 2b730f8b5a9c809740a0b3b302db43a719c636b1)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 //                        libCEED + PETSc Example: CEED BPs
9 //
10 // This example demonstrates a simple usage of libCEED with PETSc to solve the
11 // CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps,
12 // on a closed surface, such as the one of a discrete sphere.
13 //
14 // The code uses higher level communication protocols in DMPlex.
15 //
16 // Build with:
17 //
18 //     make bpssphere [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
19 //
20 // Sample runs:
21 //
22 //     bpssphere -problem bp1 -degree 3
23 //     bpssphere -problem bp2 -degree 3
24 //     bpssphere -problem bp3 -degree 3
25 //     bpssphere -problem bp4 -degree 3
26 //     bpssphere -problem bp5 -degree 3 -ceed /cpu/self
27 //     bpssphere -problem bp6 -degree 3 -ceed /gpu/cuda
28 //
29 //TESTARGS -ceed {ceed_resource} -test -problem bp3 -degree 3 -dm_refine 2
30 
31 /// @file
32 /// CEED BPs example using PETSc with DMPlex
/// See bps.c for a "raw" implementation using a structured grid,
/// and bpsdmplex.c for an implementation using an unstructured grid.
35 static const char help[] = "Solve CEED BPs on a sphere using DMPlex in PETSc\n";
36 
37 #include "bpssphere.h"
38 
39 #include <ceed.h>
40 #include <petsc.h>
41 #include <petscdmplex.h>
42 #include <petscksp.h>
43 #include <stdbool.h>
44 #include <string.h>
45 
46 #include "include/libceedsetup.h"
47 #include "include/matops.h"
48 #include "include/petscutils.h"
49 #include "include/petscversion.h"
50 #include "include/sphereproblemdata.h"
51 
52 #if PETSC_VERSION_LT(3, 12, 0)
53 #ifdef PETSC_HAVE_CUDA
54 #include <petsccuda.h>
55 // Note: With PETSc prior to version 3.12.0, providing the source path to
56 //       include 'cublas_v2.h' will be needed to use 'petsccuda.h'.
57 #endif
58 #endif
59 
60 int main(int argc, char **argv) {
61   MPI_Comm             comm;
62   char                 ceed_resource[PETSC_MAX_PATH_LEN] = "/cpu/self", filename[PETSC_MAX_PATH_LEN];
63   double               my_rt_start, my_rt, rt_min, rt_max;
64   PetscInt             degree = 3, q_extra, l_size, g_size, topo_dim = 2, num_comp_x = 3, num_comp_u = 1, xl_size;
65   PetscScalar         *r;
66   PetscBool            test_mode, benchmark_mode, read_mesh, write_solution, simplex;
67   PetscLogStage        solve_stage;
68   Vec                  X, X_loc, rhs, rhs_loc;
69   Mat                  mat_O;
70   KSP                  ksp;
71   DM                   dm;
72   OperatorApplyContext op_apply_ctx, op_error_ctx;
73   Ceed                 ceed;
74   CeedData             ceed_data;
75   CeedQFunction        qf_error;
76   CeedOperator         op_error;
77   CeedVector           rhs_ceed, target;
78   BPType               bp_choice;
79   VecType              vec_type;
80   PetscMemType         mem_type;
81 
82   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
83   comm = PETSC_COMM_WORLD;
84 
85   // Read command line options
86   PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL);
87   bp_choice = CEED_BP1;
88   PetscCall(PetscOptionsEnum("-problem", "CEED benchmark problem to solve", NULL, bp_types, (PetscEnum)bp_choice, (PetscEnum *)&bp_choice, NULL));
89   num_comp_u = bp_options[bp_choice].num_comp_u;
90   test_mode  = PETSC_FALSE;
91   PetscCall(PetscOptionsBool("-test", "Testing mode (do not print unless error is large)", NULL, test_mode, &test_mode, NULL));
92   benchmark_mode = PETSC_FALSE;
93   PetscCall(PetscOptionsBool("-benchmark", "Benchmarking mode (prints benchmark statistics)", NULL, benchmark_mode, &benchmark_mode, NULL));
94   write_solution = PETSC_FALSE;
95   PetscCall(PetscOptionsBool("-write_solution", "Write solution for visualization", NULL, write_solution, &write_solution, NULL));
96   degree = test_mode ? 3 : 2;
97   PetscCall(PetscOptionsInt("-degree", "Polynomial degree of tensor product basis", NULL, degree, &degree, NULL));
98   q_extra = bp_options[bp_choice].q_extra;
99   PetscCall(PetscOptionsInt("-q_extra", "Number of extra quadrature points", NULL, q_extra, &q_extra, NULL));
100   PetscCall(PetscOptionsString("-ceed", "CEED resource specifier", NULL, ceed_resource, ceed_resource, sizeof(ceed_resource), NULL));
101   read_mesh = PETSC_FALSE;
102   PetscCall(PetscOptionsString("-mesh", "Read mesh from file", NULL, filename, filename, sizeof(filename), &read_mesh));
103   simplex = PETSC_FALSE;
104   PetscCall(PetscOptionsBool("-simplex", "Use simplices, or tensor product cells", NULL, simplex, &simplex, NULL));
105   PetscOptionsEnd();
106 
107   // Setup DM
108   if (read_mesh) {
109     PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, filename, NULL, PETSC_TRUE, &dm));
110   } else {
111     // Create the mesh as a 0-refined sphere. This will create a cubic surface,
112     // not a box, and will snap to the unit sphere upon refinement.
113     PetscCall(DMPlexCreateSphereMesh(PETSC_COMM_WORLD, topo_dim, simplex, 1., &dm));
114     // Set the object name
115     PetscCall(PetscObjectSetName((PetscObject)dm, "Sphere"));
116     // Refine DMPlex with uniform refinement using runtime option -dm_refine
117     PetscCall(DMPlexSetRefinementUniform(dm, PETSC_TRUE));
118   }
119   PetscCall(DMSetFromOptions(dm));
120   // View DMPlex via runtime option
121   PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
122 
123   // Create DM
124   PetscCall(SetupDMByDegree(dm, degree, q_extra, num_comp_u, topo_dim, false));
125 
126   // Create vectors
127   PetscCall(DMCreateGlobalVector(dm, &X));
128   PetscCall(VecGetLocalSize(X, &l_size));
129   PetscCall(VecGetSize(X, &g_size));
130   PetscCall(DMCreateLocalVector(dm, &X_loc));
131   PetscCall(VecGetSize(X_loc, &xl_size));
132   PetscCall(VecDuplicate(X, &rhs));
133 
134   // Operator
135   PetscCall(PetscMalloc1(1, &op_apply_ctx));
136   PetscCall(PetscMalloc1(1, &op_error_ctx));
137   PetscCall(MatCreateShell(comm, l_size, l_size, g_size, g_size, op_apply_ctx, &mat_O));
138   PetscCall(MatShellSetOperation(mat_O, MATOP_MULT, (void (*)(void))MatMult_Ceed));
139 
140   // Set up libCEED
141   CeedInit(ceed_resource, &ceed);
142   CeedMemType mem_type_backend;
143   CeedGetPreferredMemType(ceed, &mem_type_backend);
144 
145   PetscCall(DMGetVecType(dm, &vec_type));
146   if (!vec_type) {  // Not yet set by user -dm_vec_type
147     switch (mem_type_backend) {
148       case CEED_MEM_HOST:
149         vec_type = VECSTANDARD;
150         break;
151       case CEED_MEM_DEVICE: {
152         const char *resolved;
153         CeedGetResource(ceed, &resolved);
154         if (strstr(resolved, "/gpu/cuda")) vec_type = VECCUDA;
155         else if (strstr(resolved, "/gpu/hip/occa")) vec_type = VECSTANDARD;  // https://github.com/CEED/libCEED/issues/678
156         else if (strstr(resolved, "/gpu/hip")) vec_type = VECHIP;
157         else vec_type = VECSTANDARD;
158       }
159     }
160     PetscCall(DMSetVecType(dm, vec_type));
161   }
162 
163   // Print summary
164   if (!test_mode) {
165     PetscInt    P = degree + 1, Q = P + q_extra;
166     const char *used_resource;
167     CeedGetResource(ceed, &used_resource);
168     PetscCall(PetscPrintf(comm,
169                           "\n-- CEED Benchmark Problem %" CeedInt_FMT " on the Sphere -- libCEED + PETSc --\n"
170                           "  libCEED:\n"
171                           "    libCEED Backend                         : %s\n"
172                           "    libCEED Backend MemType                 : %s\n"
173                           "  Mesh:\n"
174                           "    Solution Order (P)                      : %" CeedInt_FMT "\n"
175                           "    Quadrature  Order (Q)                   : %" CeedInt_FMT "\n"
176                           "    Additional quadrature points (q_extra)  : %" CeedInt_FMT "\n"
177                           "    Global nodes                            : %" PetscInt_FMT "\n",
178                           bp_choice + 1, ceed_resource, CeedMemTypes[mem_type_backend], P, Q, q_extra, g_size / num_comp_u));
179   }
180 
181   // Create RHS vector
182   PetscCall(VecDuplicate(X_loc, &rhs_loc));
183   PetscCall(VecZeroEntries(rhs_loc));
184   PetscCall(VecGetArrayAndMemType(rhs_loc, &r, &mem_type));
185   CeedVectorCreate(ceed, xl_size, &rhs_ceed);
186   CeedVectorSetArray(rhs_ceed, MemTypeP2C(mem_type), CEED_USE_POINTER, r);
187 
188   // Setup libCEED's objects
189   PetscCall(PetscMalloc1(1, &ceed_data));
190   PetscCall(SetupLibceedByDegree(dm, ceed, degree, topo_dim, q_extra, num_comp_x, num_comp_u, g_size, xl_size, bp_options[bp_choice], ceed_data, true,
191                                  rhs_ceed, &target));
192 
193   // Gather RHS
194   CeedVectorTakeArray(rhs_ceed, MemTypeP2C(mem_type), NULL);
195   PetscCall(VecRestoreArrayAndMemType(rhs_loc, &r));
196   PetscCall(VecZeroEntries(rhs));
197   PetscCall(DMLocalToGlobal(dm, rhs_loc, ADD_VALUES, rhs));
198   CeedVectorDestroy(&rhs_ceed);
199 
200   // Create the error Q-function
201   CeedQFunctionCreateInterior(ceed, 1, bp_options[bp_choice].error, bp_options[bp_choice].error_loc, &qf_error);
202   CeedQFunctionAddInput(qf_error, "u", num_comp_u, CEED_EVAL_INTERP);
203   CeedQFunctionAddInput(qf_error, "true_soln", num_comp_u, CEED_EVAL_NONE);
204   CeedQFunctionAddInput(qf_error, "qdata", ceed_data->q_data_size, CEED_EVAL_NONE);
205   CeedQFunctionAddOutput(qf_error, "error", num_comp_u, CEED_EVAL_INTERP);
206 
207   // Create the error operator
208   CeedOperatorCreate(ceed, qf_error, NULL, NULL, &op_error);
209   CeedOperatorSetField(op_error, "u", ceed_data->elem_restr_u, ceed_data->basis_u, CEED_VECTOR_ACTIVE);
210   CeedOperatorSetField(op_error, "true_soln", ceed_data->elem_restr_u_i, CEED_BASIS_COLLOCATED, target);
211   CeedOperatorSetField(op_error, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_COLLOCATED, ceed_data->q_data);
212   CeedOperatorSetField(op_error, "error", ceed_data->elem_restr_u, ceed_data->basis_u, CEED_VECTOR_ACTIVE);
213 
214   // Set up apply operator context
215   PetscCall(SetupApplyOperatorCtx(comm, dm, ceed, ceed_data, X_loc, op_apply_ctx));
216 
217   // Setup solver
218   PetscCall(KSPCreate(comm, &ksp));
219   {
220     PC pc;
221     PetscCall(KSPGetPC(ksp, &pc));
222     if (bp_choice == CEED_BP1 || bp_choice == CEED_BP2) {
223       PetscCall(PCSetType(pc, PCJACOBI));
224       PetscCall(PCJacobiSetType(pc, PC_JACOBI_ROWSUM));
225     } else {
226       PetscCall(PCSetType(pc, PCNONE));
227       MatNullSpace nullspace;
228 
229       PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
230       PetscCall(MatSetNullSpace(mat_O, nullspace));
231       PetscCall(MatNullSpaceDestroy(&nullspace));
232     }
233     PetscCall(KSPSetType(ksp, KSPCG));
234     PetscCall(KSPSetNormType(ksp, KSP_NORM_NATURAL));
235     PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
236   }
237   PetscCall(KSPSetFromOptions(ksp));
238   PetscCall(KSPSetOperators(ksp, mat_O, mat_O));
239 
240   // First run, if benchmarking
241   if (benchmark_mode) {
242     PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1));
243     my_rt_start = MPI_Wtime();
244     PetscCall(KSPSolve(ksp, rhs, X));
245     my_rt = MPI_Wtime() - my_rt_start;
246     PetscCall(MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm));
247     // Set maxits based on first iteration timing
248     if (my_rt > 0.02) {
249       PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 5));
250     } else {
251       PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20));
252     }
253   }
254 
255   // Timed solve
256   PetscCall(VecZeroEntries(X));
257   PetscCall(PetscBarrier((PetscObject)ksp));
258 
259   // -- Performance logging
260   PetscCall(PetscLogStageRegister("Solve Stage", &solve_stage));
261   PetscCall(PetscLogStagePush(solve_stage));
262 
263   // -- Solve
264   my_rt_start = MPI_Wtime();
265   PetscCall(KSPSolve(ksp, rhs, X));
266   my_rt = MPI_Wtime() - my_rt_start;
267 
268   // -- Performance logging
269   PetscCall(PetscLogStagePop());
270 
271   // Output results
272   {
273     KSPType            ksp_type;
274     KSPConvergedReason reason;
275     PetscReal          rnorm;
276     PetscInt           its;
277     PetscCall(KSPGetType(ksp, &ksp_type));
278     PetscCall(KSPGetConvergedReason(ksp, &reason));
279     PetscCall(KSPGetIterationNumber(ksp, &its));
280     PetscCall(KSPGetResidualNorm(ksp, &rnorm));
281     if (!test_mode || reason < 0 || rnorm > 1e-8) {
282       PetscCall(PetscPrintf(comm,
283                             "  KSP:\n"
284                             "    KSP Type                                : %s\n"
285                             "    KSP Convergence                         : %s\n"
286                             "    Total KSP Iterations                    : %" PetscInt_FMT "\n"
287                             "    Final rnorm                             : %e\n",
288                             ksp_type, KSPConvergedReasons[reason], its, (double)rnorm));
289     }
290     if (!test_mode) {
291       PetscCall(PetscPrintf(comm, "  Performance:\n"));
292     }
293     {
294       // Set up error operator context
295       PetscCall(SetupErrorOperatorCtx(comm, dm, ceed, ceed_data, X_loc, op_error, op_error_ctx));
296       PetscScalar l2_error;
297       PetscCall(ComputeL2Error(X, &l2_error, op_error_ctx));
298       PetscReal tol = 5e-4;
299       if (!test_mode || l2_error > tol) {
300         PetscCall(MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm));
301         PetscCall(MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm));
302         PetscCall(PetscPrintf(comm,
303                               "    L2 Error                                : %e\n"
304                               "    CG Solve Time                           : %g (%g) sec\n",
305                               (double)l2_error, rt_max, rt_min));
306       }
307     }
308     if (benchmark_mode && (!test_mode)) {
309       PetscCall(PetscPrintf(comm, "    DoFs/Sec in CG                            : %g (%g) million\n", 1e-6 * g_size * its / rt_max,
310                             1e-6 * g_size * its / rt_min));
311     }
312   }
313 
314   // Output solution
315   if (write_solution) {
316     PetscViewer vtk_viewer_soln;
317 
318     PetscCall(PetscViewerCreate(comm, &vtk_viewer_soln));
319     PetscCall(PetscViewerSetType(vtk_viewer_soln, PETSCVIEWERVTK));
320     PetscCall(PetscViewerFileSetName(vtk_viewer_soln, "solution.vtu"));
321     PetscCall(VecView(X, vtk_viewer_soln));
322     PetscCall(PetscViewerDestroy(&vtk_viewer_soln));
323   }
324 
325   // Cleanup
326   PetscCall(VecDestroy(&X));
327   PetscCall(VecDestroy(&X_loc));
328   PetscCall(VecDestroy(&op_apply_ctx->Y_loc));
329   PetscCall(VecDestroy(&op_error_ctx->Y_loc));
330   PetscCall(MatDestroy(&mat_O));
331   PetscCall(PetscFree(op_apply_ctx));
332   PetscCall(PetscFree(op_error_ctx));
333   PetscCall(CeedDataDestroy(0, ceed_data));
334   PetscCall(DMDestroy(&dm));
335 
336   PetscCall(VecDestroy(&rhs));
337   PetscCall(VecDestroy(&rhs_loc));
338   PetscCall(KSPDestroy(&ksp));
339   CeedVectorDestroy(&target);
340   CeedQFunctionDestroy(&qf_error);
341   CeedOperatorDestroy(&op_error);
342   CeedDestroy(&ceed);
343   return PetscFinalize();
344 }
345