xref: /libCEED/examples/petsc/bps.c (revision dada6cc03e18f0f18a02732451145fd10701e640)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 //                        libCEED + PETSc Example: CEED BPs
9 //
10 // This example demonstrates a simple usage of libCEED with PETSc to solve the
11 // CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
12 //
13 // The code uses higher level communication protocols in DMPlex.
14 //
15 // Build with:
16 //
17 //     make bps [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
18 //
19 // Sample runs:
20 //
21 //     ./bps -problem bp1 -degree 3
22 //     ./bps -problem bp2 -degree 3
23 //     ./bps -problem bp3 -degree 3
24 //     ./bps -problem bp4 -degree 3
25 //     ./bps -problem bp5 -degree 3 -ceed /cpu/self
26 //     ./bps -problem bp6 -degree 3 -ceed /gpu/cuda
27 //
28 //TESTARGS -ceed {ceed_resource} -test -problem bp5 -degree 3 -ksp_max_it_clip 15,15
29 
30 /// @file
31 /// CEED BPs example using PETSc with DMPlex
32 /// See bpsraw.c for a "raw" implementation using a structured grid.
33 const char help[] = "Solve CEED BPs using PETSc with DMPlex\n";
34 
35 #include <stdbool.h>
36 #include <string.h>
37 #include <ceed.h>
38 #include <petsc.h>
39 #include <petscdmplex.h>
40 #include <petscksp.h>
41 #include <petscsys.h>
42 
43 #include "bps.h"
44 #include "include/bpsproblemdata.h"
45 #include "include/petscutils.h"
46 #include "include/petscversion.h"
47 #include "include/matops.h"
48 #include "include/structs.h"
49 #include "include/libceedsetup.h"
50 
51 #if PETSC_VERSION_LT(3,12,0)
52 #ifdef PETSC_HAVE_CUDA
53 #include <petsccuda.h>
54 // Note: With PETSc prior to version 3.12.0, providing the source path to
55 //       include 'cublas_v2.h' will be needed to use 'petsccuda.h'.
56 #endif
57 #endif
58 
59 // -----------------------------------------------------------------------------
60 // Utilities
61 // -----------------------------------------------------------------------------
62 
63 // Utility function, compute three factors of an integer
64 static void Split3(PetscInt size, PetscInt m[3], bool reverse) {
65   for (PetscInt d=0, size_left=size; d<3; d++) {
66     PetscInt try = (PetscInt)PetscCeilReal(PetscPowReal(size_left, 1./(3 - d)));
67     while (try * (size_left / try) != size_left) try++;
68     m[reverse ? 2-d : d] = try;
69     size_left /= try;
70   }
71 }
72 
73 static int Max3(const PetscInt a[3]) {
74   return PetscMax(a[0], PetscMax(a[1], a[2]));
75 }
76 
77 static int Min3(const PetscInt a[3]) {
78   return PetscMin(a[0], PetscMin(a[1], a[2]));
79 }
80 
// -----------------------------------------------------------------------------
// Parameter structure for running problems
// -----------------------------------------------------------------------------
// Bundles every option read on the command line plus derived values so the
// benchmarking loops in main() can pass a single handle down to Run().
typedef struct RunParams_ *RunParams;
struct RunParams_ {
  MPI_Comm comm;                  // Communicator for all collective calls and printing
  PetscBool test_mode,            // Suppress output unless error/convergence is bad
            read_mesh,            // Mesh comes from -mesh file instead of a box mesh
            user_l_nodes,         // -local_nodes was given; pick element counts to match
            write_solution;       // Write solution.vtu via a VTK viewer
  char *filename, *hostname;      // Mesh file path; hostname shown in the summary
  PetscInt local_nodes,           // Target locally owned nodes per process
           degree,                // Polynomial degree of the tensor-product basis
           q_extra,               // Extra quadrature points (PETSC_DECIDE for per-BP default)
           dim,                   // Topological dimension of the mesh
           num_comp_u,            // Components per node (from bp_options)
           *mesh_elem;            // Elements per dimension for the box mesh
  PetscInt ksp_max_it_clip[2];    // [min,max] iteration caps used for the timed solve
  PetscMPIInt ranks_per_node;     // Ranks per compute node (-1 if unknown)
  BPType bp_choice;               // Which CEED BP is currently being run
  PetscLogStage solve_stage;      // Logging stage wrapping the timed KSPSolve
};
95 
96 // -----------------------------------------------------------------------------
97 // Main body of program, called in a loop for performance benchmarking purposes
98 // -----------------------------------------------------------------------------
99 static PetscErrorCode RunWithDM(RunParams rp, DM dm,
100                                 const char *ceed_resource) {
101   PetscErrorCode ierr;
102   double my_rt_start, my_rt, rt_min, rt_max;
103   PetscInt xl_size, l_size, g_size;
104   PetscScalar *r;
105   Vec X, X_loc, rhs, rhs_loc;
106   Mat mat_O;
107   KSP ksp;
108   UserO user_O;
109   Ceed ceed;
110   CeedData ceed_data;
111   CeedQFunction qf_error;
112   CeedOperator op_error;
113   CeedVector rhs_ceed, target;
114   VecType vec_type;
115   PetscMemType mem_type;
116 
117   PetscFunctionBeginUser;
118   // Set up libCEED
119   CeedInit(ceed_resource, &ceed);
120   CeedMemType mem_type_backend;
121   CeedGetPreferredMemType(ceed, &mem_type_backend);
122 
123   ierr = DMGetVecType(dm, &vec_type); CHKERRQ(ierr);
124   if (!vec_type) { // Not yet set by user -dm_vec_type
125     switch (mem_type_backend) {
126     case CEED_MEM_HOST: vec_type = VECSTANDARD; break;
127     case CEED_MEM_DEVICE: {
128       const char *resolved;
129       CeedGetResource(ceed, &resolved);
130       if (strstr(resolved, "/gpu/cuda")) vec_type = VECCUDA;
131       else if (strstr(resolved, "/gpu/hip/occa"))
132         vec_type = VECSTANDARD; // https://github.com/CEED/libCEED/issues/678
133       else if (strstr(resolved, "/gpu/hip")) vec_type = VECHIP;
134       else vec_type = VECSTANDARD;
135     }
136     }
137     ierr = DMSetVecType(dm, vec_type); CHKERRQ(ierr);
138   }
139 
140   // Create global and local solution vectors
141   ierr = DMCreateGlobalVector(dm, &X); CHKERRQ(ierr);
142   ierr = VecGetLocalSize(X, &l_size); CHKERRQ(ierr);
143   ierr = VecGetSize(X, &g_size); CHKERRQ(ierr);
144   ierr = DMCreateLocalVector(dm, &X_loc); CHKERRQ(ierr);
145   ierr = VecGetSize(X_loc, &xl_size); CHKERRQ(ierr);
146   ierr = VecDuplicate(X, &rhs); CHKERRQ(ierr);
147 
148   // Operator
149   ierr = PetscMalloc1(1, &user_O); CHKERRQ(ierr);
150   ierr = MatCreateShell(rp->comm, l_size, l_size, g_size, g_size,
151                         user_O, &mat_O); CHKERRQ(ierr);
152   ierr = MatShellSetOperation(mat_O, MATOP_MULT,
153                               (void(*)(void))MatMult_Ceed); CHKERRQ(ierr);
154   ierr = MatShellSetOperation(mat_O, MATOP_GET_DIAGONAL,
155                               (void(*)(void))MatGetDiag); CHKERRQ(ierr);
156   ierr = MatShellSetVecType(mat_O, vec_type); CHKERRQ(ierr);
157 
158   // Print summary
159   if (!rp->test_mode) {
160     PetscInt P = rp->degree + 1, Q = P + rp->q_extra;
161 
162     const char *used_resource;
163     CeedGetResource(ceed, &used_resource);
164 
165     VecType vec_type;
166     ierr = VecGetType(X, &vec_type); CHKERRQ(ierr);
167 
168     PetscInt c_start, c_end;
169     ierr = DMPlexGetHeightStratum(dm, 0, &c_start, &c_end); CHKERRQ(ierr);
170     PetscMPIInt comm_size;
171     ierr = MPI_Comm_size(rp->comm, &comm_size); CHKERRQ(ierr);
172     ierr = PetscPrintf(rp->comm,
173                        "\n-- CEED Benchmark Problem %" CeedInt_FMT " -- libCEED + PETSc --\n"
174                        "  MPI:\n"
175                        "    Hostname                           : %s\n"
176                        "    Total ranks                        : %d\n"
177                        "    Ranks per compute node             : %d\n"
178                        "  PETSc:\n"
179                        "    PETSc Vec Type                     : %s\n"
180                        "  libCEED:\n"
181                        "    libCEED Backend                    : %s\n"
182                        "    libCEED Backend MemType            : %s\n"
183                        "  Mesh:\n"
184                        "    Number of 1D Basis Nodes (P)       : %" CeedInt_FMT "\n"
185                        "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
186                        "    Global nodes                       : %" PetscInt_FMT "\n"
187                        "    Local Elements                     : %" PetscInt_FMT "\n"
188                        "    Owned nodes                        : %" PetscInt_FMT "\n"
189                        "    DoF per node                       : %" PetscInt_FMT "\n",
190                        rp->bp_choice+1, rp->hostname, comm_size,
191                        rp->ranks_per_node, vec_type, used_resource,
192                        CeedMemTypes[mem_type_backend],
193                        P, Q, g_size/rp->num_comp_u, c_end - c_start, l_size/rp->num_comp_u,
194                        rp->num_comp_u);
195     CHKERRQ(ierr);
196   }
197 
198   // Create RHS vector
199   ierr = VecDuplicate(X_loc, &rhs_loc); CHKERRQ(ierr);
200   ierr = VecZeroEntries(rhs_loc); CHKERRQ(ierr);
201   ierr = VecGetArrayAndMemType(rhs_loc, &r, &mem_type); CHKERRQ(ierr);
202   CeedVectorCreate(ceed, xl_size, &rhs_ceed);
203   CeedVectorSetArray(rhs_ceed, MemTypeP2C(mem_type), CEED_USE_POINTER, r);
204 
205   ierr = PetscMalloc1(1, &ceed_data); CHKERRQ(ierr);
206   ierr = SetupLibceedByDegree(dm, ceed, rp->degree, rp->dim, rp->q_extra,
207                               rp->dim, rp->num_comp_u, g_size, xl_size, bp_options[rp->bp_choice],
208                               ceed_data, true, rhs_ceed, &target); CHKERRQ(ierr);
209 
210   // Gather RHS
211   CeedVectorTakeArray(rhs_ceed, MemTypeP2C(mem_type), NULL);
212   ierr = VecRestoreArrayAndMemType(rhs_loc, &r); CHKERRQ(ierr);
213   ierr = VecZeroEntries(rhs); CHKERRQ(ierr);
214   ierr = DMLocalToGlobal(dm, rhs_loc, ADD_VALUES, rhs); CHKERRQ(ierr);
215   CeedVectorDestroy(&rhs_ceed);
216 
217   // Create the error QFunction
218   CeedQFunctionCreateInterior(ceed, 1, bp_options[rp->bp_choice].error,
219                               bp_options[rp->bp_choice].error_loc, &qf_error);
220   CeedQFunctionAddInput(qf_error, "u", rp->num_comp_u, CEED_EVAL_INTERP);
221   CeedQFunctionAddInput(qf_error, "true_soln", rp->num_comp_u, CEED_EVAL_NONE);
222   CeedQFunctionAddOutput(qf_error, "error", rp->num_comp_u, CEED_EVAL_NONE);
223 
224   // Create the error operator
225   CeedOperatorCreate(ceed, qf_error, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
226                      &op_error);
227   CeedOperatorSetField(op_error, "u", ceed_data->elem_restr_u,
228                        ceed_data->basis_u, CEED_VECTOR_ACTIVE);
229   CeedOperatorSetField(op_error, "true_soln", ceed_data->elem_restr_u_i,
230                        CEED_BASIS_COLLOCATED, target);
231   CeedOperatorSetField(op_error, "error", ceed_data->elem_restr_u_i,
232                        CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE);
233 
234   // Set up Mat
235   user_O->comm = rp->comm;
236   user_O->dm = dm;
237   user_O->X_loc = X_loc;
238   ierr = VecDuplicate(X_loc, &user_O->Y_loc); CHKERRQ(ierr);
239   user_O->x_ceed = ceed_data->x_ceed;
240   user_O->y_ceed = ceed_data->y_ceed;
241   user_O->op = ceed_data->op_apply;
242   user_O->ceed = ceed;
243 
244   ierr = KSPCreate(rp->comm, &ksp); CHKERRQ(ierr);
245   {
246     PC pc;
247     ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
248     if (rp->bp_choice == CEED_BP1 || rp->bp_choice == CEED_BP2) {
249       ierr = PCSetType(pc, PCJACOBI); CHKERRQ(ierr);
250       ierr = PCJacobiSetType(pc, PC_JACOBI_ROWSUM); CHKERRQ(ierr);
251     } else {
252       ierr = PCSetType(pc, PCNONE); CHKERRQ(ierr);
253     }
254     ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr);
255     ierr = KSPSetNormType(ksp, KSP_NORM_NATURAL); CHKERRQ(ierr);
256     ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT,
257                             PETSC_DEFAULT); CHKERRQ(ierr);
258   }
259   ierr = KSPSetOperators(ksp, mat_O, mat_O); CHKERRQ(ierr);
260 
261   // First run's performance log is not considered for benchmarking purposes
262   ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1);
263   CHKERRQ(ierr);
264   my_rt_start = MPI_Wtime();
265   ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
266   my_rt = MPI_Wtime() - my_rt_start;
267   ierr = MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, rp->comm);
268   CHKERRQ(ierr);
269   // Set maxits based on first iteration timing
270   if (my_rt > 0.02) {
271     ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT,
272                             rp->ksp_max_it_clip[0]);
273     CHKERRQ(ierr);
274   } else {
275     ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT,
276                             rp->ksp_max_it_clip[1]);
277     CHKERRQ(ierr);
278   }
279   ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
280 
281   // Timed solve
282   ierr = VecZeroEntries(X); CHKERRQ(ierr);
283   ierr = PetscBarrier((PetscObject)ksp); CHKERRQ(ierr);
284 
285   // -- Performance logging
286   ierr = PetscLogStagePush(rp->solve_stage); CHKERRQ(ierr);
287 
288   // -- Solve
289   my_rt_start = MPI_Wtime();
290   ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
291   my_rt = MPI_Wtime() - my_rt_start;
292 
293   // -- Performance logging
294   ierr = PetscLogStagePop();
295 
296   // Output results
297   {
298     KSPType ksp_type;
299     KSPConvergedReason reason;
300     PetscReal rnorm;
301     PetscInt its;
302     ierr = KSPGetType(ksp, &ksp_type); CHKERRQ(ierr);
303     ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr);
304     ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
305     ierr = KSPGetResidualNorm(ksp, &rnorm); CHKERRQ(ierr);
306     if (!rp->test_mode || reason < 0 || rnorm > 1e-8) {
307       ierr = PetscPrintf(rp->comm,
308                          "  KSP:\n"
309                          "    KSP Type                           : %s\n"
310                          "    KSP Convergence                    : %s\n"
311                          "    Total KSP Iterations               : %" PetscInt_FMT "\n"
312                          "    Final rnorm                        : %e\n",
313                          ksp_type, KSPConvergedReasons[reason], its,
314                          (double)rnorm); CHKERRQ(ierr);
315     }
316     if (!rp->test_mode) {
317       ierr = PetscPrintf(rp->comm,"  Performance:\n"); CHKERRQ(ierr);
318     }
319     {
320       PetscReal max_error;
321       ierr = ComputeErrorMax(user_O, op_error, X, target, &max_error);
322       CHKERRQ(ierr);
323       PetscReal tol = 5e-2;
324       if (!rp->test_mode || max_error > tol) {
325         ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, rp->comm);
326         CHKERRQ(ierr);
327         ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, rp->comm);
328         CHKERRQ(ierr);
329         ierr = PetscPrintf(rp->comm,
330                            "    Pointwise Error (max)              : %e\n"
331                            "    CG Solve Time                      : %g (%g) sec\n",
332                            (double)max_error, rt_max, rt_min); CHKERRQ(ierr);
333       }
334     }
335     if (!rp->test_mode) {
336       ierr = PetscPrintf(rp->comm,
337                          "    DoFs/Sec in CG                     : %g (%g) million\n",
338                          1e-6*g_size*its/rt_max,
339                          1e-6*g_size*its/rt_min); CHKERRQ(ierr);
340     }
341   }
342 
343   if (rp->write_solution) {
344     PetscViewer vtk_viewer_soln;
345 
346     ierr = PetscViewerCreate(rp->comm, &vtk_viewer_soln); CHKERRQ(ierr);
347     ierr = PetscViewerSetType(vtk_viewer_soln, PETSCVIEWERVTK); CHKERRQ(ierr);
348     ierr = PetscViewerFileSetName(vtk_viewer_soln, "solution.vtu"); CHKERRQ(ierr);
349     ierr = VecView(X, vtk_viewer_soln); CHKERRQ(ierr);
350     ierr = PetscViewerDestroy(&vtk_viewer_soln); CHKERRQ(ierr);
351   }
352 
353   // Cleanup
354   ierr = VecDestroy(&X); CHKERRQ(ierr);
355   ierr = VecDestroy(&X_loc); CHKERRQ(ierr);
356   ierr = VecDestroy(&user_O->Y_loc); CHKERRQ(ierr);
357   ierr = MatDestroy(&mat_O); CHKERRQ(ierr);
358   ierr = PetscFree(user_O); CHKERRQ(ierr);
359   ierr = CeedDataDestroy(0, ceed_data); CHKERRQ(ierr);
360 
361   ierr = VecDestroy(&rhs); CHKERRQ(ierr);
362   ierr = VecDestroy(&rhs_loc); CHKERRQ(ierr);
363   ierr = KSPDestroy(&ksp); CHKERRQ(ierr);
364   CeedVectorDestroy(&target);
365   CeedQFunctionDestroy(&qf_error);
366   CeedOperatorDestroy(&op_error);
367   CeedDestroy(&ceed);
368   PetscFunctionReturn(0);
369 }
370 
371 static PetscErrorCode Run(RunParams rp, PetscInt num_resources,
372                           char *const *ceed_resources, PetscInt num_bp_choices,
373                           const BPType *bp_choices) {
374   PetscInt ierr;
375   DM dm;
376 
377   PetscFunctionBeginUser;
378   // Setup DM
379   if (rp->read_mesh) {
380     ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, rp->filename, NULL, PETSC_TRUE,
381                                 &dm);
382     CHKERRQ(ierr);
383   } else {
384     if (rp->user_l_nodes) {
385       // Find a nicely composite number of elements no less than global nodes
386       PetscMPIInt size;
387       ierr = MPI_Comm_size(rp->comm, &size); CHKERRQ(ierr);
388       for (PetscInt g_elem =
389              PetscMax(1, size * rp->local_nodes / PetscPowInt(rp->degree, rp->dim));
390            ;
391            g_elem++) {
392         Split3(g_elem, rp->mesh_elem, true);
393         if (Max3(rp->mesh_elem) / Min3(rp->mesh_elem) <= 2) break;
394       }
395     }
396     ierr = DMPlexCreateBoxMesh(PETSC_COMM_WORLD, rp->dim, PETSC_FALSE,
397                                rp->mesh_elem,
398                                NULL, NULL, NULL, PETSC_TRUE, &dm); CHKERRQ(ierr);
399   }
400 
401   ierr = DMSetFromOptions(dm); CHKERRQ(ierr);
402   ierr = DMViewFromOptions(dm, NULL, "-dm_view"); CHKERRQ(ierr);
403 
404   for (PetscInt b = 0; b < num_bp_choices; b++) {
405     DM dm_deg;
406     VecType vec_type;
407     PetscInt q_extra = rp->q_extra;
408     rp->bp_choice = bp_choices[b];
409     rp->num_comp_u = bp_options[rp->bp_choice].num_comp_u;
410     rp->q_extra = q_extra < 0 ? bp_options[rp->bp_choice].q_extra : q_extra;
411     ierr = DMClone(dm, &dm_deg); CHKERRQ(ierr);
412     ierr = DMGetVecType(dm, &vec_type); CHKERRQ(ierr);
413     ierr = DMSetVecType(dm_deg, vec_type); CHKERRQ(ierr);
414     // Create DM
415     PetscInt dim;
416     ierr = DMGetDimension(dm_deg, &dim); CHKERRQ(ierr);
417     ierr = SetupDMByDegree(dm_deg, rp->degree, rp->num_comp_u, dim,
418                            bp_options[rp->bp_choice].enforce_bc,
419                            bp_options[rp->bp_choice].bc_func); CHKERRQ(ierr);
420     for (PetscInt r = 0; r < num_resources; r++) {
421       ierr = RunWithDM(rp, dm_deg, ceed_resources[r]); CHKERRQ(ierr);
422     }
423     ierr = DMDestroy(&dm_deg); CHKERRQ(ierr);
424     rp->q_extra = q_extra;
425   }
426 
427   ierr = DMDestroy(&dm); CHKERRQ(ierr);
428   PetscFunctionReturn(0);
429 }
430 
431 int main(int argc, char **argv) {
432   PetscInt ierr, comm_size;
433   RunParams rp;
434   MPI_Comm comm;
435   char filename[PETSC_MAX_PATH_LEN];
436   char *ceed_resources[30];
437   PetscInt num_ceed_resources = 30;
438   char hostname[PETSC_MAX_PATH_LEN];
439 
440   PetscInt dim = 3, mesh_elem[3] = {3, 3, 3};
441   PetscInt num_degrees = 30, degree[30] = {}, num_local_nodes = 2,
442                                           local_nodes[2] = {};
443   PetscMPIInt ranks_per_node;
444   PetscBool degree_set;
445   BPType bp_choices[10];
446   PetscInt num_bp_choices = 10;
447 
448   // Initialize PETSc
449   ierr = PetscInitialize(&argc, &argv, NULL, help);
450   if (ierr) return ierr;
451   comm = PETSC_COMM_WORLD;
452   ierr = MPI_Comm_size(comm, &comm_size);
453   if (ierr != MPI_SUCCESS) return ierr;
454   #if defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
455   {
456     MPI_Comm splitcomm;
457     ierr = MPI_Comm_split_type(comm, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL,
458                                &splitcomm);
459     CHKERRQ(ierr);
460     ierr = MPI_Comm_size(splitcomm, &ranks_per_node); CHKERRQ(ierr);
461     ierr = MPI_Comm_free(&splitcomm); CHKERRQ(ierr);
462   }
463   #else
464   ranks_per_node = -1; // Unknown
465   #endif
466 
467   // Setup all parameters needed in Run()
468   ierr = PetscMalloc1(1, &rp); CHKERRQ(ierr);
469   rp->comm = comm;
470 
471   // Read command line options
472   PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL);
473   {
474     PetscBool set;
475     ierr = PetscOptionsEnumArray("-problem", "CEED benchmark problem to solve",
476                                  NULL,
477                                  bp_types, (PetscEnum *)bp_choices, &num_bp_choices, &set);
478     CHKERRQ(ierr);
479     if (!set) {
480       bp_choices[0] = CEED_BP1;
481       num_bp_choices = 1;
482     }
483   }
484   rp->test_mode = PETSC_FALSE;
485   ierr = PetscOptionsBool("-test",
486                           "Testing mode (do not print unless error is large)",
487                           NULL, rp->test_mode, &rp->test_mode, NULL); CHKERRQ(ierr);
488   rp->write_solution = PETSC_FALSE;
489   ierr = PetscOptionsBool("-write_solution", "Write solution for visualization",
490                           NULL, rp->write_solution, &rp->write_solution, NULL);
491   CHKERRQ(ierr);
492   degree[0] = rp->test_mode ? 3 : 2;
493   ierr = PetscOptionsIntArray("-degree",
494                               "Polynomial degree of tensor product basis", NULL,
495                               degree, &num_degrees, &degree_set); CHKERRQ(ierr);
496   if (!degree_set)
497     num_degrees = 1;
498   rp->q_extra = PETSC_DECIDE;
499   ierr = PetscOptionsInt("-q_extra",
500                          "Number of extra quadrature points (-1 for auto)", NULL,
501                          rp->q_extra, &rp->q_extra, NULL); CHKERRQ(ierr);
502   {
503     PetscBool set;
504     ierr = PetscOptionsStringArray("-ceed",
505                                    "CEED resource specifier (comma-separated list)", NULL,
506                                    ceed_resources, &num_ceed_resources, &set); CHKERRQ(ierr);
507     if (!set) {
508       ierr = PetscStrallocpy( "/cpu/self", &ceed_resources[0]); CHKERRQ(ierr);
509       num_ceed_resources = 1;
510     }
511   }
512   ierr = PetscGetHostName(hostname, sizeof hostname); CHKERRQ(ierr);
513   ierr = PetscOptionsString("-hostname", "Hostname for output", NULL, hostname,
514                             hostname, sizeof(hostname), NULL); CHKERRQ(ierr);
515   rp->read_mesh = PETSC_FALSE;
516   ierr = PetscOptionsString("-mesh", "Read mesh from file", NULL, filename,
517                             filename, sizeof(filename), &rp->read_mesh);
518   CHKERRQ(ierr);
519   rp->filename = filename;
520   if (!rp->read_mesh) {
521     PetscInt tmp = dim;
522     ierr = PetscOptionsIntArray("-cells", "Number of cells per dimension", NULL,
523                                 mesh_elem, &tmp, NULL); CHKERRQ(ierr);
524   }
525   local_nodes[0] = 1000;
526   ierr = PetscOptionsIntArray("-local_nodes",
527                               "Target number of locally owned nodes per "
528                               "process (single value or min,max)",
529                               NULL, local_nodes, &num_local_nodes, &rp->user_l_nodes);
530   CHKERRQ(ierr);
531   if (num_local_nodes < 2)
532     local_nodes[1] = 2 * local_nodes[0];
533   {
534     PetscInt two = 2;
535     rp->ksp_max_it_clip[0] = 5;
536     rp->ksp_max_it_clip[1] = 20;
537     ierr = PetscOptionsIntArray("-ksp_max_it_clip",
538                                 "Min and max number of iterations to use during benchmarking",
539                                 NULL, rp->ksp_max_it_clip, &two, NULL); CHKERRQ(ierr);
540   }
541   if (!degree_set) {
542     PetscInt max_degree = 8;
543     ierr = PetscOptionsInt("-max_degree",
544                            "Range of degrees [1, max_degree] to run with",
545                            NULL, max_degree, &max_degree, NULL);
546     CHKERRQ(ierr);
547     for (PetscInt i = 0; i < max_degree; i++)
548       degree[i] = i + 1;
549     num_degrees = max_degree;
550   }
551   {
552     PetscBool flg;
553     PetscInt p = ranks_per_node;
554     ierr = PetscOptionsInt("-p", "Number of MPI ranks per node", NULL,
555                            p, &p, &flg);
556     CHKERRQ(ierr);
557     if (flg) ranks_per_node = p;
558   }
559 
560   PetscOptionsEnd();
561 
562   // Register PETSc logging stage
563   ierr = PetscLogStageRegister("Solve Stage", &rp->solve_stage);
564   CHKERRQ(ierr);
565 
566   rp->hostname = hostname;
567   rp->dim = dim;
568   rp->mesh_elem = mesh_elem;
569   rp->ranks_per_node = ranks_per_node;
570 
571   for (PetscInt d = 0; d < num_degrees; d++) {
572     PetscInt deg = degree[d];
573     for (PetscInt n = local_nodes[0]; n < local_nodes[1]; n *= 2) {
574       rp->degree = deg;
575       rp->local_nodes = n;
576       ierr = Run(rp, num_ceed_resources, ceed_resources,
577                  num_bp_choices, bp_choices); CHKERRQ(ierr);
578     }
579   }
580   // Clear memory
581   ierr = PetscFree(rp); CHKERRQ(ierr);
582   for (PetscInt i=0; i<num_ceed_resources; i++) {
583     ierr = PetscFree(ceed_resources[i]); CHKERRQ(ierr);
584   }
585   return PetscFinalize();
586 }
587