xref: /libCEED/examples/petsc/bpsraw.c (revision bd9c22ba9c10c91da67bf629a19e4aaff2bdb5f2)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 //                        libCEED + PETSc Example: CEED BPs
9 //
10 // This example demonstrates a simple usage of libCEED with PETSc to solve the CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
11 //
12 // The code is intentionally "raw", using only low-level communication primitives.
13 //
14 // Build with:
15 //
16 //     make bpsraw [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
17 //
18 // Sample runs:
19 //
20 //     ./bpsraw -problem bp1
21 //     ./bpsraw -problem bp2
22 //     ./bpsraw -problem bp3
23 //     ./bpsraw -problem bp4
24 //     ./bpsraw -problem bp5 -ceed /cpu/self
25 //     ./bpsraw -problem bp6 -ceed /gpu/cuda
26 //
27 //TESTARGS -ceed {ceed_resource} -test -problem bp2 -degree 5 -q_extra 1 -ksp_max_it_clip 15,15
28 
29 /// @file
30 /// CEED BPs example using PETSc
31 /// See bps.c for an implementation using DMPlex unstructured grids.
// Help string printed by PetscInitialize when the program is run with -help
const char help[] = "Solve CEED BPs using PETSc\n";
33 
34 #include <ceed.h>
35 #include <petscksp.h>
36 #include <petscsys.h>
37 #include <stdbool.h>
38 #include <string.h>
39 
40 #include "qfunctions/bps/bp1.h"
41 #include "qfunctions/bps/bp2.h"
42 #include "qfunctions/bps/bp3.h"
43 #include "qfunctions/bps/bp4.h"
44 #include "qfunctions/bps/common.h"
45 
46 #if PETSC_VERSION_LT(3, 12, 0)
47 #ifdef PETSC_HAVE_CUDA
48 #include <petsccuda.h>
49 // Note: With PETSc prior to version 3.12.0, providing the source path to include 'cublas_v2.h' will be needed to use 'petsccuda.h'.
50 #endif
51 #endif
52 
53 static CeedMemType MemTypeP2C(PetscMemType mem_type) { return PetscMemTypeDevice(mem_type) ? CEED_MEM_DEVICE : CEED_MEM_HOST; }
54 
55 static void Split3(PetscInt size, PetscInt m[3], bool reverse) {
56   for (PetscInt d = 0, size_left = size; d < 3; d++) {
57     PetscInt try = (PetscInt)PetscCeilReal(PetscPowReal(size_left, 1. / (3 - d)));
58     while (try * (size_left / try) != size_left) try++;
59     m[reverse ? 2 - d : d] = try;
60     size_left /= try;
61   }
62 }
63 
64 static PetscInt Max3(const PetscInt a[3]) { return PetscMax(a[0], PetscMax(a[1], a[2])); }
65 static PetscInt Min3(const PetscInt a[3]) { return PetscMin(a[0], PetscMin(a[1], a[2])); }
66 static void     GlobalNodes(const PetscInt p[3], const PetscInt i_rank[3], PetscInt degree, const PetscInt mesh_elem[3], PetscInt m_nodes[3]) {
67       for (int d = 0; d < 3; d++) m_nodes[d] = degree * mesh_elem[d] + (i_rank[d] == p[d] - 1);
68 }
69 static PetscInt GlobalStart(const PetscInt p[3], const PetscInt i_rank[3], PetscInt degree, const PetscInt mesh_elem[3]) {
70   PetscInt start = 0;
71   // Dumb brute-force is easier to read
72   for (PetscInt i = 0; i < p[0]; i++) {
73     for (PetscInt j = 0; j < p[1]; j++) {
74       for (PetscInt k = 0; k < p[2]; k++) {
75         PetscInt m_nodes[3], ijk_rank[] = {i, j, k};
76         if (i == i_rank[0] && j == i_rank[1] && k == i_rank[2]) return start;
77         GlobalNodes(p, ijk_rank, degree, mesh_elem, m_nodes);
78         start += m_nodes[0] * m_nodes[1] * m_nodes[2];
79       }
80     }
81   }
82   return -1;
83 }
84 static PetscErrorCode CreateRestriction(Ceed ceed, const CeedInt mesh_elem[3], CeedInt P, CeedInt num_comp, CeedElemRestriction *elem_restr) {
85   const PetscInt num_elem = mesh_elem[0] * mesh_elem[1] * mesh_elem[2];
86   PetscInt       m_nodes[3], *idx, *idx_p;
87 
88   PetscFunctionBeginUser;
89   // Get indicies
90   for (int d = 0; d < 3; d++) m_nodes[d] = mesh_elem[d] * (P - 1) + 1;
91   idx_p = idx = malloc(num_elem * P * P * P * sizeof idx[0]);
92   for (CeedInt i = 0; i < mesh_elem[0]; i++) {
93     for (CeedInt j = 0; j < mesh_elem[1]; j++) {
94       for (CeedInt k = 0; k < mesh_elem[2]; k++, idx_p += P * P * P) {
95         for (CeedInt ii = 0; ii < P; ii++) {
96           for (CeedInt jj = 0; jj < P; jj++) {
97             for (CeedInt kk = 0; kk < P; kk++) {
98               if (0) {  // This is the C-style (i,j,k) ordering that I prefer
99                 idx_p[(ii * P + jj) * P + kk] = num_comp * (((i * (P - 1) + ii) * m_nodes[1] + (j * (P - 1) + jj)) * m_nodes[2] + (k * (P - 1) + kk));
100               } else {  // (k,j,i) ordering for consistency with MFEM example
101                 idx_p[ii + P * (jj + P * kk)] = num_comp * (((i * (P - 1) + ii) * m_nodes[1] + (j * (P - 1) + jj)) * m_nodes[2] + (k * (P - 1) + kk));
102               }
103             }
104           }
105         }
106       }
107     }
108   }
109 
110   // Setup CEED restriction
111   CeedElemRestrictionCreate(ceed, num_elem, P * P * P, num_comp, 1, m_nodes[0] * m_nodes[1] * m_nodes[2] * num_comp, CEED_MEM_HOST, CEED_OWN_POINTER,
112                             idx, elem_restr);
113 
114   PetscFunctionReturn(0);
115 }
116 
// Data for PETSc
// Context passed to the MatShell callbacks (MatMult_Mass / MatMult_Diff) and to
// ComputeErrorMax; bundles the scatters, work vectors, and the libCEED operator.
typedef struct OperatorApplyContext_ *OperatorApplyContext;
struct OperatorApplyContext_ {
  MPI_Comm     comm;
  VecScatter   l_to_g;    // Scatter for all entries
  VecScatter   l_to_g_0;  // Skip Dirichlet values
  VecScatter   g_to_g_D;  // global-to-global; only Dirichlet values
  Vec          X_loc, Y_loc;    // per-process local work vectors
  CeedVector   x_ceed, y_ceed;  // libCEED wrappers around the local vectors' arrays
  CeedOperator op;              // operator applied in the MatMult callbacks
  CeedVector   q_data;          // quadrature data referenced by op
  Ceed         ceed;
};
130 
// BP Options
typedef enum { CEED_BP1 = 0, CEED_BP2 = 1, CEED_BP3 = 2, CEED_BP4 = 3, CEED_BP5 = 4, CEED_BP6 = 5 } BPType;
// Array layout follows the PetscOptionsEnum convention: the value names, then
// the enum type name, the enum prefix, and a NULL terminator.
static const char *const bp_types[] = {"bp1", "bp2", "bp3", "bp4", "bp5", "bp6", "BPType", "CEED_BP", 0};
134 
// BP specific data
typedef struct {
  CeedInt           num_comp_u, q_data_size, q_extra;                         // solution components; qdata entries per point; extra quadrature points
  CeedQFunctionUser setup_geo, setup_rhs, apply, error;                       // QFunction callbacks
  const char       *setup_geo_loc, *setup_rhs_loc, *apply_loc, *error_loc;    // source locations of the QFunctions
  CeedEvalMode      in_mode, out_mode;                                        // evaluation mode of the operator input/output
  CeedQuadMode      q_mode;                                                   // quadrature rule
} BPData;
143 
144 BPData bp_options[6] = {
145     [CEED_BP1] = {.num_comp_u    = 1,
146                   .q_data_size   = 1,
147                   .q_extra       = 1,
148                   .setup_geo     = SetupMassGeo,
149                   .setup_rhs     = SetupMassRhs,
150                   .apply         = Mass,
151                   .error         = Error,
152                   .setup_geo_loc = SetupMassGeo_loc,
153                   .setup_rhs_loc = SetupMassRhs_loc,
154                   .apply_loc     = Mass_loc,
155                   .error_loc     = Error_loc,
156                   .in_mode       = CEED_EVAL_INTERP,
157                   .out_mode      = CEED_EVAL_INTERP,
158                   .q_mode        = CEED_GAUSS        },
159     [CEED_BP2] = {.num_comp_u    = 3,
160                   .q_data_size   = 1,
161                   .q_extra       = 1,
162                   .setup_geo     = SetupMassGeo,
163                   .setup_rhs     = SetupMassRhs3,
164                   .apply         = Mass3,
165                   .error         = Error3,
166                   .setup_geo_loc = SetupMassGeo_loc,
167                   .setup_rhs_loc = SetupMassRhs3_loc,
168                   .apply_loc     = Mass3_loc,
169                   .error_loc     = Error3_loc,
170                   .in_mode       = CEED_EVAL_INTERP,
171                   .out_mode      = CEED_EVAL_INTERP,
172                   .q_mode        = CEED_GAUSS        },
173     [CEED_BP3] = {.num_comp_u    = 1,
174                   .q_data_size   = 7,
175                   .q_extra       = 1,
176                   .setup_geo     = SetupDiffGeo,
177                   .setup_rhs     = SetupDiffRhs,
178                   .apply         = Diff,
179                   .error         = Error,
180                   .setup_geo_loc = SetupDiffGeo_loc,
181                   .setup_rhs_loc = SetupDiffRhs_loc,
182                   .apply_loc     = Diff_loc,
183                   .error_loc     = Error_loc,
184                   .in_mode       = CEED_EVAL_GRAD,
185                   .out_mode      = CEED_EVAL_GRAD,
186                   .q_mode        = CEED_GAUSS        },
187     [CEED_BP4] = {.num_comp_u    = 3,
188                   .q_data_size   = 7,
189                   .q_extra       = 1,
190                   .setup_geo     = SetupDiffGeo,
191                   .setup_rhs     = SetupDiffRhs3,
192                   .apply         = Diff3,
193                   .error         = Error3,
194                   .setup_geo_loc = SetupDiffGeo_loc,
195                   .setup_rhs_loc = SetupDiffRhs3_loc,
196                   .apply_loc     = Diff3_loc,
197                   .error_loc     = Error3_loc,
198                   .in_mode       = CEED_EVAL_GRAD,
199                   .out_mode      = CEED_EVAL_GRAD,
200                   .q_mode        = CEED_GAUSS        },
201     [CEED_BP5] = {.num_comp_u    = 1,
202                   .q_data_size   = 7,
203                   .q_extra       = 0,
204                   .setup_geo     = SetupDiffGeo,
205                   .setup_rhs     = SetupDiffRhs,
206                   .apply         = Diff,
207                   .error         = Error,
208                   .setup_geo_loc = SetupDiffGeo_loc,
209                   .setup_rhs_loc = SetupDiffRhs_loc,
210                   .apply_loc     = Diff_loc,
211                   .error_loc     = Error_loc,
212                   .in_mode       = CEED_EVAL_GRAD,
213                   .out_mode      = CEED_EVAL_GRAD,
214                   .q_mode        = CEED_GAUSS_LOBATTO},
215     [CEED_BP6] = {.num_comp_u    = 3,
216                   .q_data_size   = 7,
217                   .q_extra       = 0,
218                   .setup_geo     = SetupDiffGeo,
219                   .setup_rhs     = SetupDiffRhs3,
220                   .apply         = Diff3,
221                   .error         = Error3,
222                   .setup_geo_loc = SetupDiffGeo_loc,
223                   .setup_rhs_loc = SetupDiffRhs3_loc,
224                   .apply_loc     = Diff3_loc,
225                   .error_loc     = Error3_loc,
226                   .in_mode       = CEED_EVAL_GRAD,
227                   .out_mode      = CEED_EVAL_GRAD,
228                   .q_mode        = CEED_GAUSS_LOBATTO}
229 };
230 
231 // This function uses libCEED to compute the action of the mass matrix
232 static PetscErrorCode MatMult_Mass(Mat A, Vec X, Vec Y) {
233   OperatorApplyContext op_apply_ctx;
234   PetscScalar         *x, *y;
235   PetscMemType         x_mem_type, y_mem_type;
236 
237   PetscFunctionBeginUser;
238 
239   PetscCall(MatShellGetContext(A, &op_apply_ctx));
240 
241   // Global-to-local
242   PetscCall(VecScatterBegin(op_apply_ctx->l_to_g, X, op_apply_ctx->X_loc, INSERT_VALUES, SCATTER_REVERSE));
243   PetscCall(VecScatterEnd(op_apply_ctx->l_to_g, X, op_apply_ctx->X_loc, INSERT_VALUES, SCATTER_REVERSE));
244 
245   // Setup libCEED vectors
246   PetscCall(VecGetArrayReadAndMemType(op_apply_ctx->X_loc, (const PetscScalar **)&x, &x_mem_type));
247   PetscCall(VecGetArrayAndMemType(op_apply_ctx->Y_loc, &y, &y_mem_type));
248   CeedVectorSetArray(op_apply_ctx->x_ceed, MemTypeP2C(x_mem_type), CEED_USE_POINTER, x);
249   CeedVectorSetArray(op_apply_ctx->y_ceed, MemTypeP2C(y_mem_type), CEED_USE_POINTER, y);
250 
251   // Apply libCEED operator
252   CeedOperatorApply(op_apply_ctx->op, op_apply_ctx->x_ceed, op_apply_ctx->y_ceed, CEED_REQUEST_IMMEDIATE);
253 
254   // Restore PETSc vectors
255   CeedVectorTakeArray(op_apply_ctx->x_ceed, MemTypeP2C(x_mem_type), NULL);
256   CeedVectorTakeArray(op_apply_ctx->y_ceed, MemTypeP2C(y_mem_type), NULL);
257   PetscCall(VecRestoreArrayReadAndMemType(op_apply_ctx->X_loc, (const PetscScalar **)&x));
258   PetscCall(VecRestoreArrayAndMemType(op_apply_ctx->Y_loc, &y));
259 
260   // Local-to-global
261   if (Y) {
262     PetscCall(VecZeroEntries(Y));
263     PetscCall(VecScatterBegin(op_apply_ctx->l_to_g, op_apply_ctx->Y_loc, Y, ADD_VALUES, SCATTER_FORWARD));
264     PetscCall(VecScatterEnd(op_apply_ctx->l_to_g, op_apply_ctx->Y_loc, Y, ADD_VALUES, SCATTER_FORWARD));
265   }
266   PetscFunctionReturn(0);
267 }
268 
269 // This function uses libCEED to compute the action of the Laplacian with Dirichlet boundary conditions
270 static PetscErrorCode MatMult_Diff(Mat A, Vec X, Vec Y) {
271   OperatorApplyContext op_apply_ctx;
272   PetscScalar         *x, *y;
273   PetscMemType         x_mem_type, y_mem_type;
274 
275   PetscFunctionBeginUser;
276 
277   PetscCall(MatShellGetContext(A, &op_apply_ctx));
278 
279   // Global-to-local
280   PetscCall(VecScatterBegin(op_apply_ctx->l_to_g_0, X, op_apply_ctx->X_loc, INSERT_VALUES, SCATTER_REVERSE));
281   PetscCall(VecScatterEnd(op_apply_ctx->l_to_g_0, X, op_apply_ctx->X_loc, INSERT_VALUES, SCATTER_REVERSE));
282 
283   // Setup libCEED vectors
284   PetscCall(VecGetArrayReadAndMemType(op_apply_ctx->X_loc, (const PetscScalar **)&x, &x_mem_type));
285   PetscCall(VecGetArrayAndMemType(op_apply_ctx->Y_loc, &y, &y_mem_type));
286   CeedVectorSetArray(op_apply_ctx->x_ceed, MemTypeP2C(x_mem_type), CEED_USE_POINTER, x);
287   CeedVectorSetArray(op_apply_ctx->y_ceed, MemTypeP2C(y_mem_type), CEED_USE_POINTER, y);
288 
289   // Apply libCEED operator
290   CeedOperatorApply(op_apply_ctx->op, op_apply_ctx->x_ceed, op_apply_ctx->y_ceed, CEED_REQUEST_IMMEDIATE);
291 
292   // Restore PETSc vectors
293   CeedVectorTakeArray(op_apply_ctx->x_ceed, MemTypeP2C(x_mem_type), NULL);
294   CeedVectorTakeArray(op_apply_ctx->y_ceed, MemTypeP2C(y_mem_type), NULL);
295   PetscCall(VecRestoreArrayReadAndMemType(op_apply_ctx->X_loc, (const PetscScalar **)&x));
296   PetscCall(VecRestoreArrayAndMemType(op_apply_ctx->Y_loc, &y));
297 
298   // Local-to-global
299   PetscCall(VecZeroEntries(Y));
300   PetscCall(VecScatterBegin(op_apply_ctx->g_to_g_D, X, Y, INSERT_VALUES, SCATTER_FORWARD));
301   PetscCall(VecScatterEnd(op_apply_ctx->g_to_g_D, X, Y, INSERT_VALUES, SCATTER_FORWARD));
302   PetscCall(VecScatterBegin(op_apply_ctx->l_to_g_0, op_apply_ctx->Y_loc, Y, ADD_VALUES, SCATTER_FORWARD));
303   PetscCall(VecScatterEnd(op_apply_ctx->l_to_g_0, op_apply_ctx->Y_loc, Y, ADD_VALUES, SCATTER_FORWARD));
304 
305   PetscFunctionReturn(0);
306 }
307 
308 // This function calculates the error in the final solution
309 static PetscErrorCode ComputeErrorMax(OperatorApplyContext op_apply_ctx, CeedOperator op_error, Vec X, CeedVector target, PetscReal *max_error) {
310   PetscScalar *x;
311   PetscMemType mem_type;
312   CeedVector   collocated_error;
313   CeedSize     length;
314 
315   PetscFunctionBeginUser;
316 
317   CeedVectorGetLength(target, &length);
318   CeedVectorCreate(op_apply_ctx->ceed, length, &collocated_error);
319 
320   // Global-to-local
321   PetscCall(VecScatterBegin(op_apply_ctx->l_to_g, X, op_apply_ctx->X_loc, INSERT_VALUES, SCATTER_REVERSE));
322   PetscCall(VecScatterEnd(op_apply_ctx->l_to_g, X, op_apply_ctx->X_loc, INSERT_VALUES, SCATTER_REVERSE));
323 
324   // Setup libCEED vector
325   PetscCall(VecGetArrayReadAndMemType(op_apply_ctx->X_loc, (const PetscScalar **)&x, &mem_type));
326   CeedVectorSetArray(op_apply_ctx->x_ceed, MemTypeP2C(mem_type), CEED_USE_POINTER, x);
327 
328   // Apply libCEED operator
329   CeedOperatorApply(op_error, op_apply_ctx->x_ceed, collocated_error, CEED_REQUEST_IMMEDIATE);
330 
331   // Restore PETSc vector
332   CeedVectorTakeArray(op_apply_ctx->x_ceed, MemTypeP2C(mem_type), NULL);
333   PetscCall(VecRestoreArrayReadAndMemType(op_apply_ctx->X_loc, (const PetscScalar **)&x));
334 
335   // Reduce max error
336   *max_error = 0;
337   const CeedScalar *e;
338   CeedVectorGetArrayRead(collocated_error, CEED_MEM_HOST, &e);
339   for (CeedInt i = 0; i < length; i++) {
340     *max_error = PetscMax(*max_error, PetscAbsScalar(e[i]));
341   }
342   CeedVectorRestoreArrayRead(collocated_error, &e);
343   PetscCall(MPI_Allreduce(MPI_IN_PLACE, max_error, 1, MPIU_REAL, MPIU_MAX, op_apply_ctx->comm));
344 
345   // Cleanup
346   CeedVectorDestroy(&collocated_error);
347 
348   PetscFunctionReturn(0);
349 }
350 
351 int main(int argc, char **argv) {
352   MPI_Comm comm;
353   char     ceed_resource[PETSC_MAX_PATH_LEN] = "/cpu/self";
354   double   my_rt_start, my_rt, rt_min, rt_max;
355   PetscInt degree, q_extra, local_nodes, local_elem, mesh_elem[3], m_nodes[3], p[3], i_rank[3], l_nodes[3], l_size, num_comp_u = 1,
356                                                                                                                     ksp_max_it_clip[2];
357   PetscScalar         *r;
358   PetscBool            test_mode, benchmark_mode, write_solution;
359   PetscMPIInt          size, rank;
360   PetscLogStage        solve_stage;
361   Vec                  X, X_loc, rhs, rhs_loc;
362   Mat                  mat;
363   KSP                  ksp;
364   VecScatter           l_to_g, l_to_g_0, g_to_g_D;
365   PetscMemType         mem_type;
366   OperatorApplyContext op_apply_ctx;
367   Ceed                 ceed;
368   CeedBasis            basis_x, basis_u;
369   CeedElemRestriction  elem_restr_x, elem_restr_u, elem_restr_u_i, elem_restr_qd_i;
370   CeedQFunction        qf_setup_geo, qf_setup_rhs, qf_apply, qf_error;
371   CeedOperator         op_setup_geo, op_setup_rhs, op_apply, op_error;
372   CeedVector           x_coord, q_data, rhs_ceed, target;
373   CeedInt              P, Q;
374   const CeedInt        dim = 3, num_comp_x = 3;
375   BPType               bp_choice;
376 
377   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
378   comm = PETSC_COMM_WORLD;
379 
380   // Read command line options
381   PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL);
382   bp_choice = CEED_BP1;
383   PetscCall(PetscOptionsEnum("-problem", "CEED benchmark problem to solve", NULL, bp_types, (PetscEnum)bp_choice, (PetscEnum *)&bp_choice, NULL));
384   num_comp_u = bp_options[bp_choice].num_comp_u;
385   test_mode  = PETSC_FALSE;
386   PetscCall(PetscOptionsBool("-test", "Testing mode (do not print unless error is large)", NULL, test_mode, &test_mode, NULL));
387   benchmark_mode = PETSC_FALSE;
388   PetscCall(PetscOptionsBool("-benchmark", "Benchmarking mode (prints benchmark statistics)", NULL, benchmark_mode, &benchmark_mode, NULL));
389   write_solution = PETSC_FALSE;
390   PetscCall(PetscOptionsBool("-write_solution", "Write solution for visualization", NULL, write_solution, &write_solution, NULL));
391   degree = test_mode ? 3 : 1;
392   PetscCall(PetscOptionsInt("-degree", "Polynomial degree of tensor product basis", NULL, degree, &degree, NULL));
393   q_extra = bp_options[bp_choice].q_extra;
394   PetscCall(PetscOptionsInt("-q_extra", "Number of extra quadrature points", NULL, q_extra, &q_extra, NULL));
395   PetscCall(PetscOptionsString("-ceed", "CEED resource specifier", NULL, ceed_resource, ceed_resource, sizeof(ceed_resource), NULL));
396   local_nodes = 1000;
397   PetscCall(PetscOptionsInt("-local", "Target number of locally owned nodes per process", NULL, local_nodes, &local_nodes, NULL));
398   PetscInt two       = 2;
399   ksp_max_it_clip[0] = 5;
400   ksp_max_it_clip[1] = 20;
401   PetscCall(
402       PetscOptionsIntArray("-ksp_max_it_clip", "Min and max number of iterations to use during benchmarking", NULL, ksp_max_it_clip, &two, NULL));
403   PetscOptionsEnd();
404   P = degree + 1;
405   Q = P + q_extra;
406 
407   // Set up libCEED
408   CeedInit(ceed_resource, &ceed);
409   CeedMemType mem_type_backend;
410   CeedGetPreferredMemType(ceed, &mem_type_backend);
411 
412   VecType default_vec_type = NULL, vec_type;
413   switch (mem_type_backend) {
414     case CEED_MEM_HOST:
415       default_vec_type = VECSTANDARD;
416       break;
417     case CEED_MEM_DEVICE: {
418       const char *resolved;
419       CeedGetResource(ceed, &resolved);
420       if (strstr(resolved, "/gpu/cuda")) default_vec_type = VECCUDA;
421       else if (strstr(resolved, "/gpu/hip/occa")) default_vec_type = VECSTANDARD;  // https://github.com/CEED/libCEED/issues/678
422       else if (strstr(resolved, "/gpu/hip")) default_vec_type = VECHIP;
423       else default_vec_type = VECSTANDARD;
424     }
425   }
426 
427   // Determine size of process grid
428   PetscCall(MPI_Comm_size(comm, &size));
429   Split3(size, p, false);
430 
431   // Find a nicely composite number of elements no less than local_nodes
432   for (local_elem = PetscMax(1, local_nodes / (degree * degree * degree));; local_elem++) {
433     Split3(local_elem, mesh_elem, true);
434     if (Max3(mesh_elem) / Min3(mesh_elem) <= 2) break;
435   }
436 
437   // Find my location in the process grid
438   PetscCall(MPI_Comm_rank(comm, &rank));
439   for (int d = 0, rank_left = rank; d < dim; d++) {
440     const int pstride[3] = {p[1] * p[2], p[2], 1};
441     i_rank[d]            = rank_left / pstride[d];
442     rank_left -= i_rank[d] * pstride[d];
443   }
444 
445   GlobalNodes(p, i_rank, degree, mesh_elem, m_nodes);
446 
447   // Setup global vector
448   PetscCall(VecCreate(comm, &X));
449   PetscCall(VecSetType(X, default_vec_type));
450   PetscCall(VecSetSizes(X, m_nodes[0] * m_nodes[1] * m_nodes[2] * num_comp_u, PETSC_DECIDE));
451   PetscCall(VecSetFromOptions(X));
452   PetscCall(VecSetUp(X));
453 
454   // Set up libCEED
455   CeedInit(ceed_resource, &ceed);
456 
457   // Print summary
458   CeedInt gsize;
459   PetscCall(VecGetSize(X, &gsize));
460   if (!test_mode) {
461     const char *used_resource;
462     CeedGetResource(ceed, &used_resource);
463 
464     PetscCall(VecGetType(X, &vec_type));
465 
466     PetscCall(PetscPrintf(comm,
467                           "\n-- CEED Benchmark Problem %" CeedInt_FMT " -- libCEED + PETSc --\n"
468                           "  PETSc:\n"
469                           "    PETSc Vec Type                     : %s\n"
470                           "  libCEED:\n"
471                           "    libCEED Backend                    : %s\n"
472                           "    libCEED Backend MemType            : %s\n"
473                           "  Mesh:\n"
474                           "    Solution Order (P)                 : %" CeedInt_FMT "\n"
475                           "    Quadrature  Order (Q)              : %" CeedInt_FMT "\n"
476                           "    Global nodes                       : %" PetscInt_FMT "\n"
477                           "    Process Decomposition              : %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n"
478                           "    Local Elements                     : %" PetscInt_FMT " = %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n"
479                           "    Owned nodes                        : %" PetscInt_FMT " = %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n"
480                           "    DoF per node                       : %" PetscInt_FMT "\n",
481                           bp_choice + 1, vec_type, used_resource, CeedMemTypes[mem_type_backend], P, Q, gsize / num_comp_u, p[0], p[1], p[2],
482                           local_elem, mesh_elem[0], mesh_elem[1], mesh_elem[2], m_nodes[0] * m_nodes[1] * m_nodes[2], m_nodes[0], m_nodes[1],
483                           m_nodes[2], num_comp_u));
484   }
485 
486   {
487     l_size = 1;
488     for (int d = 0; d < dim; d++) {
489       l_nodes[d] = mesh_elem[d] * degree + 1;
490       l_size *= l_nodes[d];
491     }
492     PetscCall(VecCreate(PETSC_COMM_SELF, &X_loc));
493     PetscCall(VecSetType(X_loc, default_vec_type));
494     PetscCall(VecSetSizes(X_loc, l_size * num_comp_u, PETSC_DECIDE));
495     PetscCall(VecSetFromOptions(X_loc));
496     PetscCall(VecSetUp(X_loc));
497 
498     // Create local-to-global scatter
499     PetscInt *l_to_g_ind, *l_to_g_ind_0, *loc_ind, l_0_count;
500     IS        l_to_g_is, l_to_g_is_0, loc_is;
501     PetscInt  g_start[2][2][2], g_m_nodes[2][2][2][dim];
502 
503     for (int i = 0; i < 2; i++) {
504       for (int j = 0; j < 2; j++) {
505         for (int k = 0; k < 2; k++) {
506           PetscInt ijk_rank[3] = {i_rank[0] + i, i_rank[1] + j, i_rank[2] + k};
507           g_start[i][j][k]     = GlobalStart(p, ijk_rank, degree, mesh_elem);
508           GlobalNodes(p, ijk_rank, degree, mesh_elem, g_m_nodes[i][j][k]);
509         }
510       }
511     }
512 
513     PetscCall(PetscMalloc1(l_size, &l_to_g_ind));
514     PetscCall(PetscMalloc1(l_size, &l_to_g_ind_0));
515     PetscCall(PetscMalloc1(l_size, &loc_ind));
516     l_0_count = 0;
517     for (PetscInt i = 0, ir, ii; ir = i >= m_nodes[0], ii = i - ir * m_nodes[0], i < l_nodes[0]; i++) {
518       for (PetscInt j = 0, jr, jj; jr = j >= m_nodes[1], jj = j - jr * m_nodes[1], j < l_nodes[1]; j++) {
519         for (PetscInt k = 0, kr, kk; kr = k >= m_nodes[2], kk = k - kr * m_nodes[2], k < l_nodes[2]; k++) {
520           PetscInt here    = (i * l_nodes[1] + j) * l_nodes[2] + k;
521           l_to_g_ind[here] = g_start[ir][jr][kr] + (ii * g_m_nodes[ir][jr][kr][1] + jj) * g_m_nodes[ir][jr][kr][2] + kk;
522           if ((i_rank[0] == 0 && i == 0) || (i_rank[1] == 0 && j == 0) || (i_rank[2] == 0 && k == 0) ||
523               (i_rank[0] + 1 == p[0] && i + 1 == l_nodes[0]) || (i_rank[1] + 1 == p[1] && j + 1 == l_nodes[1]) ||
524               (i_rank[2] + 1 == p[2] && k + 1 == l_nodes[2]))
525             continue;
526           l_to_g_ind_0[l_0_count] = l_to_g_ind[here];
527           loc_ind[l_0_count++]    = here;
528         }
529       }
530     }
531     PetscCall(ISCreateBlock(comm, num_comp_u, l_size, l_to_g_ind, PETSC_OWN_POINTER, &l_to_g_is));
532     PetscCall(VecScatterCreate(X_loc, NULL, X, l_to_g_is, &l_to_g));
533     PetscCall(ISCreateBlock(comm, num_comp_u, l_0_count, l_to_g_ind_0, PETSC_OWN_POINTER, &l_to_g_is_0));
534     PetscCall(ISCreateBlock(comm, num_comp_u, l_0_count, loc_ind, PETSC_OWN_POINTER, &loc_is));
535     PetscCall(VecScatterCreate(X_loc, loc_is, X, l_to_g_is_0, &l_to_g_0));
536     {
537       // Create global-to-global scatter for Dirichlet values (everything not in l_to_g_is_0, which is the range of l_to_g_0)
538       PetscInt           x_start, x_end, *ind_D, count_D = 0;
539       IS                 is_D;
540       const PetscScalar *x;
541       PetscCall(VecZeroEntries(X_loc));
542       PetscCall(VecSet(X, 1.0));
543       PetscCall(VecScatterBegin(l_to_g_0, X_loc, X, INSERT_VALUES, SCATTER_FORWARD));
544       PetscCall(VecScatterEnd(l_to_g_0, X_loc, X, INSERT_VALUES, SCATTER_FORWARD));
545       PetscCall(VecGetOwnershipRange(X, &x_start, &x_end));
546       PetscCall(PetscMalloc1(x_end - x_start, &ind_D));
547       PetscCall(VecGetArrayRead(X, &x));
548       for (PetscInt i = 0; i < x_end - x_start; i++) {
549         if (x[i] == 1.) ind_D[count_D++] = x_start + i;
550       }
551       PetscCall(VecRestoreArrayRead(X, &x));
552       PetscCall(ISCreateGeneral(comm, count_D, ind_D, PETSC_COPY_VALUES, &is_D));
553       PetscCall(PetscFree(ind_D));
554       PetscCall(VecScatterCreate(X, is_D, X, is_D, &g_to_g_D));
555       PetscCall(ISDestroy(&is_D));
556     }
557     PetscCall(ISDestroy(&l_to_g_is));
558     PetscCall(ISDestroy(&l_to_g_is_0));
559     PetscCall(ISDestroy(&loc_is));
560   }
561 
562   // CEED bases
563   CeedBasisCreateTensorH1Lagrange(ceed, dim, num_comp_u, P, Q, bp_options[bp_choice].q_mode, &basis_u);
564   CeedBasisCreateTensorH1Lagrange(ceed, dim, num_comp_x, 2, Q, bp_options[bp_choice].q_mode, &basis_x);
565 
566   // CEED restrictions
567   CreateRestriction(ceed, mesh_elem, P, num_comp_u, &elem_restr_u);
568   CreateRestriction(ceed, mesh_elem, 2, dim, &elem_restr_x);
569   CeedInt num_elem = mesh_elem[0] * mesh_elem[1] * mesh_elem[2];
570   CeedElemRestrictionCreateStrided(ceed, num_elem, Q * Q * Q, num_comp_u, num_comp_u * num_elem * Q * Q * Q, CEED_STRIDES_BACKEND, &elem_restr_u_i);
571   CeedElemRestrictionCreateStrided(ceed, num_elem, Q * Q * Q, bp_options[bp_choice].q_data_size,
572                                    bp_options[bp_choice].q_data_size * num_elem * Q * Q * Q, CEED_STRIDES_BACKEND, &elem_restr_qd_i);
573   {
574     CeedScalar *x_loc;
575     CeedInt     shape[3] = {mesh_elem[0] + 1, mesh_elem[1] + 1, mesh_elem[2] + 1}, len = shape[0] * shape[1] * shape[2];
576     x_loc = malloc(len * num_comp_x * sizeof x_loc[0]);
577     for (CeedInt i = 0; i < shape[0]; i++) {
578       for (CeedInt j = 0; j < shape[1]; j++) {
579         for (CeedInt k = 0; k < shape[2]; k++) {
580           x_loc[dim * ((i * shape[1] + j) * shape[2] + k) + 0] = 1. * (i_rank[0] * mesh_elem[0] + i) / (p[0] * mesh_elem[0]);
581           x_loc[dim * ((i * shape[1] + j) * shape[2] + k) + 1] = 1. * (i_rank[1] * mesh_elem[1] + j) / (p[1] * mesh_elem[1]);
582           x_loc[dim * ((i * shape[1] + j) * shape[2] + k) + 2] = 1. * (i_rank[2] * mesh_elem[2] + k) / (p[2] * mesh_elem[2]);
583         }
584       }
585     }
586     CeedVectorCreate(ceed, len * num_comp_x, &x_coord);
587     CeedVectorSetArray(x_coord, CEED_MEM_HOST, CEED_OWN_POINTER, x_loc);
588   }
589 
590   // Create the QFunction that builds the operator quadrature data
591   CeedQFunctionCreateInterior(ceed, 1, bp_options[bp_choice].setup_geo, bp_options[bp_choice].setup_geo_loc, &qf_setup_geo);
592   CeedQFunctionAddInput(qf_setup_geo, "x", num_comp_x, CEED_EVAL_INTERP);
593   CeedQFunctionAddInput(qf_setup_geo, "dx", num_comp_x * dim, CEED_EVAL_GRAD);
594   CeedQFunctionAddInput(qf_setup_geo, "weight", 1, CEED_EVAL_WEIGHT);
595   CeedQFunctionAddOutput(qf_setup_geo, "q_data", bp_options[bp_choice].q_data_size, CEED_EVAL_NONE);
596 
597   // Create the QFunction that sets up the RHS and true solution
598   CeedQFunctionCreateInterior(ceed, 1, bp_options[bp_choice].setup_rhs, bp_options[bp_choice].setup_rhs_loc, &qf_setup_rhs);
599   CeedQFunctionAddInput(qf_setup_rhs, "x", num_comp_x, CEED_EVAL_INTERP);
600   CeedQFunctionAddInput(qf_setup_rhs, "q_data", bp_options[bp_choice].q_data_size, CEED_EVAL_NONE);
601   CeedQFunctionAddOutput(qf_setup_rhs, "true_soln", num_comp_u, CEED_EVAL_NONE);
602   CeedQFunctionAddOutput(qf_setup_rhs, "rhs", num_comp_u, CEED_EVAL_INTERP);
603 
604   // Set up PDE operator
605   CeedQFunctionCreateInterior(ceed, 1, bp_options[bp_choice].apply, bp_options[bp_choice].apply_loc, &qf_apply);
606   // Add inputs and outputs
607   CeedInt in_scale  = bp_options[bp_choice].in_mode == CEED_EVAL_GRAD ? 3 : 1;
608   CeedInt out_scale = bp_options[bp_choice].out_mode == CEED_EVAL_GRAD ? 3 : 1;
609   CeedQFunctionAddInput(qf_apply, "u", num_comp_u * in_scale, bp_options[bp_choice].in_mode);
610   CeedQFunctionAddInput(qf_apply, "q_data", bp_options[bp_choice].q_data_size, CEED_EVAL_NONE);
611   CeedQFunctionAddOutput(qf_apply, "v", num_comp_u * out_scale, bp_options[bp_choice].out_mode);
612 
613   // Create the error qfunction
614   CeedQFunctionCreateInterior(ceed, 1, bp_options[bp_choice].error, bp_options[bp_choice].error_loc, &qf_error);
615   CeedQFunctionAddInput(qf_error, "u", num_comp_u, CEED_EVAL_INTERP);
616   CeedQFunctionAddInput(qf_error, "true_soln", num_comp_u, CEED_EVAL_NONE);
617   CeedQFunctionAddInput(qf_error, "qdata", bp_options[bp_choice].q_data_size, CEED_EVAL_NONE);
618   CeedQFunctionAddOutput(qf_error, "error", num_comp_u, CEED_EVAL_NONE);
619 
620   // Create the persistent vectors that will be needed in setup
621   CeedInt num_qpts;
622   CeedBasisGetNumQuadraturePoints(basis_u, &num_qpts);
623   CeedVectorCreate(ceed, bp_options[bp_choice].q_data_size * num_elem * num_qpts, &q_data);
624   CeedVectorCreate(ceed, num_elem * num_qpts * num_comp_u, &target);
625   CeedVectorCreate(ceed, l_size * num_comp_u, &rhs_ceed);
626 
627   // Create the operator that builds the quadrature data for the ceed operator
628   CeedOperatorCreate(ceed, qf_setup_geo, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, &op_setup_geo);
629   CeedOperatorSetField(op_setup_geo, "x", elem_restr_x, basis_x, CEED_VECTOR_ACTIVE);
630   CeedOperatorSetField(op_setup_geo, "dx", elem_restr_x, basis_x, CEED_VECTOR_ACTIVE);
631   CeedOperatorSetField(op_setup_geo, "weight", CEED_ELEMRESTRICTION_NONE, basis_x, CEED_VECTOR_NONE);
632   CeedOperatorSetField(op_setup_geo, "q_data", elem_restr_qd_i, CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE);
633 
634   // Create the operator that builds the RHS and true solution
635   CeedOperatorCreate(ceed, qf_setup_rhs, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, &op_setup_rhs);
636   CeedOperatorSetField(op_setup_rhs, "x", elem_restr_x, basis_x, CEED_VECTOR_ACTIVE);
637   CeedOperatorSetField(op_setup_rhs, "q_data", elem_restr_qd_i, CEED_BASIS_COLLOCATED, q_data);
638   CeedOperatorSetField(op_setup_rhs, "true_soln", elem_restr_u_i, CEED_BASIS_COLLOCATED, target);
639   CeedOperatorSetField(op_setup_rhs, "rhs", elem_restr_u, basis_u, CEED_VECTOR_ACTIVE);
640 
641   // Create the mass or diff operator
642   CeedOperatorCreate(ceed, qf_apply, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, &op_apply);
643   CeedOperatorSetField(op_apply, "u", elem_restr_u, basis_u, CEED_VECTOR_ACTIVE);
644   CeedOperatorSetField(op_apply, "q_data", elem_restr_qd_i, CEED_BASIS_COLLOCATED, q_data);
645   CeedOperatorSetField(op_apply, "v", elem_restr_u, basis_u, CEED_VECTOR_ACTIVE);
646 
647   // Create the error operator
648   CeedOperatorCreate(ceed, qf_error, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, &op_error);
649   CeedOperatorSetField(op_error, "u", elem_restr_u, basis_u, CEED_VECTOR_ACTIVE);
650   CeedOperatorSetField(op_error, "true_soln", elem_restr_u_i, CEED_BASIS_COLLOCATED, target);
651   CeedOperatorSetField(op_error, "qdata", elem_restr_qd_i, CEED_BASIS_COLLOCATED, q_data);
652   CeedOperatorSetField(op_error, "error", elem_restr_u_i, CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE);
653 
654   // Set up Mat
655   PetscCall(PetscMalloc1(1, &op_apply_ctx));
656   op_apply_ctx->comm   = comm;
657   op_apply_ctx->l_to_g = l_to_g;
658   if (bp_choice != CEED_BP1 && bp_choice != CEED_BP2) {
659     op_apply_ctx->l_to_g_0 = l_to_g_0;
660     op_apply_ctx->g_to_g_D = g_to_g_D;
661   }
662   op_apply_ctx->X_loc = X_loc;
663   PetscCall(VecDuplicate(X_loc, &op_apply_ctx->Y_loc));
664   CeedVectorCreate(ceed, l_size * num_comp_u, &op_apply_ctx->x_ceed);
665   CeedVectorCreate(ceed, l_size * num_comp_u, &op_apply_ctx->y_ceed);
666   op_apply_ctx->op     = op_apply;
667   op_apply_ctx->q_data = q_data;
668   op_apply_ctx->ceed   = ceed;
669 
670   PetscCall(MatCreateShell(comm, m_nodes[0] * m_nodes[1] * m_nodes[2] * num_comp_u, m_nodes[0] * m_nodes[1] * m_nodes[2] * num_comp_u, PETSC_DECIDE,
671                            PETSC_DECIDE, op_apply_ctx, &mat));
672   if (bp_choice == CEED_BP1 || bp_choice == CEED_BP2) {
673     PetscCall(MatShellSetOperation(mat, MATOP_MULT, (void (*)(void))MatMult_Mass));
674   } else {
675     PetscCall(MatShellSetOperation(mat, MATOP_MULT, (void (*)(void))MatMult_Diff));
676   }
677   PetscCall(VecGetType(X, &vec_type));
678   PetscCall(MatShellSetVecType(mat, vec_type));
679 
680   // Get RHS vector
681   PetscCall(VecDuplicate(X, &rhs));
682   PetscCall(VecDuplicate(X_loc, &rhs_loc));
683   PetscCall(VecZeroEntries(rhs_loc));
684   PetscCall(VecGetArrayAndMemType(rhs_loc, &r, &mem_type));
685   CeedVectorSetArray(rhs_ceed, MemTypeP2C(mem_type), CEED_USE_POINTER, r);
686 
687   // Setup q_data, rhs, and target
688   CeedOperatorApply(op_setup_geo, x_coord, q_data, CEED_REQUEST_IMMEDIATE);
689   CeedOperatorApply(op_setup_rhs, x_coord, rhs_ceed, CEED_REQUEST_IMMEDIATE);
690   CeedVectorDestroy(&x_coord);
691 
692   // Gather RHS
693   PetscCall(CeedVectorTakeArray(rhs_ceed, MemTypeP2C(mem_type), NULL));
694   PetscCall(VecRestoreArrayAndMemType(rhs_loc, &r));
695   PetscCall(VecZeroEntries(rhs));
696   PetscCall(VecScatterBegin(l_to_g, rhs_loc, rhs, ADD_VALUES, SCATTER_FORWARD));
697   PetscCall(VecScatterEnd(l_to_g, rhs_loc, rhs, ADD_VALUES, SCATTER_FORWARD));
698   CeedVectorDestroy(&rhs_ceed);
699 
700   PetscCall(KSPCreate(comm, &ksp));
701   {
702     PC pc;
703     PetscCall(KSPGetPC(ksp, &pc));
704     if (bp_choice == CEED_BP1 || bp_choice == CEED_BP2) {
705       PetscCall(PCSetType(pc, PCJACOBI));
706       PetscCall(PCJacobiSetType(pc, PC_JACOBI_ROWSUM));
707     } else {
708       PetscCall(PCSetType(pc, PCNONE));
709     }
710     PetscCall(KSPSetType(ksp, KSPCG));
711     PetscCall(KSPSetNormType(ksp, KSP_NORM_NATURAL));
712     PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
713   }
714   PetscCall(KSPSetOperators(ksp, mat, mat));
715   // First run's performance log is not considered for benchmarking purposes
716   PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1));
717   my_rt_start = MPI_Wtime();
718   PetscCall(KSPSolve(ksp, rhs, X));
719   my_rt = MPI_Wtime() - my_rt_start;
720   PetscCall(MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm));
721   // Set maxits based on first iteration timing
722   if (my_rt > 0.02) {
723     PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, ksp_max_it_clip[0]));
724   } else {
725     PetscCall(KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, ksp_max_it_clip[1]));
726   }
727   PetscCall(KSPSetFromOptions(ksp));
728 
729   // Timed solve
730   PetscCall(VecZeroEntries(X));
731   PetscCall(PetscBarrier((PetscObject)ksp));
732 
733   // -- Performance logging
734   PetscCall(PetscLogStageRegister("Solve Stage", &solve_stage));
735   PetscCall(PetscLogStagePush(solve_stage));
736 
737   // -- Solve
738   my_rt_start = MPI_Wtime();
739   PetscCall(KSPSolve(ksp, rhs, X));
740   my_rt = MPI_Wtime() - my_rt_start;
741 
742   // -- Performance logging
743   PetscCall(PetscLogStagePop());
744 
745   // Output results
746   {
747     KSPType            ksp_type;
748     KSPConvergedReason reason;
749     PetscReal          rnorm;
750     PetscInt           its;
751     PetscCall(KSPGetType(ksp, &ksp_type));
752     PetscCall(KSPGetConvergedReason(ksp, &reason));
753     PetscCall(KSPGetIterationNumber(ksp, &its));
754     PetscCall(KSPGetResidualNorm(ksp, &rnorm));
755     if (!test_mode || reason < 0 || rnorm > 1e-8) {
756       PetscCall(PetscPrintf(comm,
757                             "  KSP:\n"
758                             "    KSP Type                           : %s\n"
759                             "    KSP Convergence                    : %s\n"
760                             "    Total KSP Iterations               : %" PetscInt_FMT "\n"
761                             "    Final rnorm                        : %e\n",
762                             ksp_type, KSPConvergedReasons[reason], its, (double)rnorm));
763     }
764     if (!test_mode) {
765       PetscCall(PetscPrintf(comm, "  Performance:\n"));
766     }
767     {
768       PetscReal max_error;
769       PetscCall(ComputeErrorMax(op_apply_ctx, op_error, X, target, &max_error));
770       PetscReal tol = 5e-2;
771       if (!test_mode || max_error > tol) {
772         PetscCall(MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm));
773         PetscCall(MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm));
774         PetscCall(PetscPrintf(comm,
775                               "    Pointwise Error (max)              : %e\n"
776                               "    CG Solve Time                      : %g (%g) sec\n",
777                               (double)max_error, rt_max, rt_min));
778       }
779     }
780     if (!test_mode) {
781       PetscCall(
782           PetscPrintf(comm, "    DoFs/Sec in CG                     : %g (%g) million\n", 1e-6 * gsize * its / rt_max, 1e-6 * gsize * its / rt_min));
783     }
784   }
785 
786   if (write_solution) {
787     PetscViewer vtk_viewer_soln;
788 
789     PetscCall(PetscViewerCreate(comm, &vtk_viewer_soln));
790     PetscCall(PetscViewerSetType(vtk_viewer_soln, PETSCVIEWERVTK));
791     PetscCall(PetscViewerFileSetName(vtk_viewer_soln, "solution.vtu"));
792     PetscCall(VecView(X, vtk_viewer_soln));
793     PetscCall(PetscViewerDestroy(&vtk_viewer_soln));
794   }
795 
796   PetscCall(VecDestroy(&rhs));
797   PetscCall(VecDestroy(&rhs_loc));
798   PetscCall(VecDestroy(&X));
799   PetscCall(VecDestroy(&op_apply_ctx->X_loc));
800   PetscCall(VecDestroy(&op_apply_ctx->Y_loc));
801   PetscCall(VecScatterDestroy(&l_to_g));
802   PetscCall(VecScatterDestroy(&l_to_g_0));
803   PetscCall(VecScatterDestroy(&g_to_g_D));
804   PetscCall(MatDestroy(&mat));
805   PetscCall(KSPDestroy(&ksp));
806 
807   CeedVectorDestroy(&op_apply_ctx->x_ceed);
808   CeedVectorDestroy(&op_apply_ctx->y_ceed);
809   CeedVectorDestroy(&op_apply_ctx->q_data);
810   CeedVectorDestroy(&target);
811   CeedOperatorDestroy(&op_setup_geo);
812   CeedOperatorDestroy(&op_setup_rhs);
813   CeedOperatorDestroy(&op_apply);
814   CeedOperatorDestroy(&op_error);
815   CeedElemRestrictionDestroy(&elem_restr_u);
816   CeedElemRestrictionDestroy(&elem_restr_x);
817   CeedElemRestrictionDestroy(&elem_restr_u_i);
818   CeedElemRestrictionDestroy(&elem_restr_qd_i);
819   CeedQFunctionDestroy(&qf_setup_geo);
820   CeedQFunctionDestroy(&qf_setup_rhs);
821   CeedQFunctionDestroy(&qf_apply);
822   CeedQFunctionDestroy(&qf_error);
823   CeedBasisDestroy(&basis_u);
824   CeedBasisDestroy(&basis_x);
825   CeedDestroy(&ceed);
826   PetscCall(PetscFree(op_apply_ctx));
827   return PetscFinalize();
828 }
829