xref: /libCEED/examples/petsc/bpsraw.c (revision 59f9181e1c585e2777fc79a7abc99a62c123904e)
1 // Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
2 // the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
3 // reserved. See files LICENSE and NOTICE for details.
4 //
5 // This file is part of CEED, a collection of benchmarks, miniapps, software
6 // libraries and APIs for efficient high-order finite element and spectral
7 // element discretizations for exascale applications. For more information and
8 // source code availability see http://github.com/ceed.
9 //
10 // The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
11 // a collaborative effort of two U.S. Department of Energy organizations (Office
12 // of Science and the National Nuclear Security Administration) responsible for
13 // the planning and preparation of a capable exascale ecosystem, including
14 // software, applications, hardware, advanced system engineering and early
15 // testbed platforms, in support of the nation's exascale computing imperative.
16 
17 //                        libCEED + PETSc Example: CEED BPs
18 //
19 // This example demonstrates a simple usage of libCEED with PETSc to solve the
20 // CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
21 //
22 // The code is intentionally "raw", using only low-level communication
23 // primitives.
24 //
25 // Build with:
26 //
27 //     make bpsraw [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
28 //
29 // Sample runs:
30 //
31 //     ./bpsraw -problem bp1
32 //     ./bpsraw -problem bp2 -ceed /cpu/self
33 //     ./bpsraw -problem bp3 -ceed /gpu/occa
34 //     ./bpsraw -problem bp4 -ceed /cpu/occa
35 //     ./bpsraw -problem bp5 -ceed /omp/occa
36 //     ./bpsraw -problem bp6 -ceed /ocl/occa
37 //
38 //TESTARGS -ceed {ceed_resource} -test -problem bp2 -degree 5 -qextra 5
39 
40 /// @file
41 /// CEED BPs example using PETSc
42 /// See bps.c for an implementation using DMPlex unstructured grids.
43 const char help[] = "Solve CEED BPs using PETSc\n";
44 
45 #include <stdbool.h>
46 #include <string.h>
47 #include <petscksp.h>
48 #include <ceed.h>
49 #include "qfunctions/bps/common.h"
50 #include "qfunctions/bps/bp1.h"
51 #include "qfunctions/bps/bp2.h"
52 #include "qfunctions/bps/bp3.h"
53 #include "qfunctions/bps/bp4.h"
54 
55 static void Split3(PetscInt size, PetscInt m[3], bool reverse) {
56   for (PetscInt d=0,sizeleft=size; d<3; d++) {
57     PetscInt try = (PetscInt)PetscCeilReal(PetscPowReal(sizeleft, 1./(3 - d)));
58     while (try * (sizeleft / try) != sizeleft) try++;
59     m[reverse ? 2-d : d] = try;
60     sizeleft /= try;
61   }
62 }
63 
64 static PetscInt Max3(const PetscInt a[3]) {
65   return PetscMax(a[0], PetscMax(a[1], a[2]));
66 }
67 static PetscInt Min3(const PetscInt a[3]) {
68   return PetscMin(a[0], PetscMin(a[1], a[2]));
69 }
// Number of nodes owned by rank `irank` in each dimension: `degree` nodes per
// element, plus one closing node on ranks sitting at the upper boundary of the
// process grid (irank[d] == p[d]-1).
static void GlobalNodes(const PetscInt p[3], const PetscInt irank[3],
                        PetscInt degree, const PetscInt melem[3],
                        PetscInt mnodes[3]) {
  for (int d=0; d<3; d++)
    mnodes[d] = degree*melem[d] + (irank[d] == p[d]-1);
}
76 static PetscInt GlobalStart(const PetscInt p[3], const PetscInt irank[3],
77                             PetscInt degree, const PetscInt melem[3]) {
78   PetscInt start = 0;
79   // Dumb brute-force is easier to read
80   for (PetscInt i=0; i<p[0]; i++) {
81     for (PetscInt j=0; j<p[1]; j++) {
82       for (PetscInt k=0; k<p[2]; k++) {
83         PetscInt mnodes[3], ijkrank[] = {i,j,k};
84         if (i == irank[0] && j == irank[1] && k == irank[2]) return start;
85         GlobalNodes(p, ijkrank, degree, melem, mnodes);
86         start += mnodes[0] * mnodes[1] * mnodes[2];
87       }
88     }
89   }
90   return -1;
91 }
92 static int CreateRestriction(Ceed ceed, CeedInterlaceMode imode,
93                              const CeedInt melem[3], CeedInt P, CeedInt ncomp,
94                              CeedElemRestriction *Erestrict) {
95   const PetscInt nelem = melem[0]*melem[1]*melem[2];
96   PetscInt mnodes[3], *idx, *idxp;
97 
98   // Get indicies
99   for (int d=0; d<3; d++) mnodes[d] = melem[d]*(P-1) + 1;
100   idxp = idx = malloc(nelem*P*P*P*sizeof idx[0]);
101   for (CeedInt i=0; i<melem[0]; i++)
102     for (CeedInt j=0; j<melem[1]; j++)
103       for (CeedInt k=0; k<melem[2]; k++,idxp += P*P*P)
104         for (CeedInt ii=0; ii<P; ii++)
105           for (CeedInt jj=0; jj<P; jj++)
106             for (CeedInt kk=0; kk<P; kk++) {
107               if (0) { // This is the C-style (i,j,k) ordering that I prefer
108                 idxp[(ii*P+jj)*P+kk] = (((i*(P-1)+ii)*mnodes[1]
109                                          + (j*(P-1)+jj))*mnodes[2]
110                                         + (k*(P-1)+kk));
111               } else { // (k,j,i) ordering for consistency with MFEM example
112                 idxp[ii+P*(jj+P*kk)] = (((i*(P-1)+ii)*mnodes[1]
113                                          + (j*(P-1)+jj))*mnodes[2]
114                                         + (k*(P-1)+kk));
115               }
116             }
117 
118   // Setup CEED restriction
119   CeedElemRestrictionCreate(ceed, imode, nelem, P*P*P,
120                             mnodes[0]*mnodes[1]*mnodes[2], ncomp,
121                             CEED_MEM_HOST, CEED_OWN_POINTER, idx, Erestrict);
122 
123   PetscFunctionReturn(0);
124 }
125 
// Data for PETSc: application context shared by the MatShell callbacks.
typedef struct User_ *User;
struct User_ {
  MPI_Comm comm;
  VecScatter ltog;              // Scatter for all entries
  VecScatter ltog0;             // Skip Dirichlet values
  VecScatter gtogD;             // global-to-global; only Dirichlet values
  Vec Xloc, Yloc;               // Process-local input/output work vectors
  CeedVector xceed, yceed;      // CEED views aliasing the local work vectors
  CeedOperator op;              // Operator applied in the MatMult callbacks
  CeedVector qdata;             // Quadrature-point data used by op
  Ceed ceed;                    // CEED context owning the objects above
};
139 
// BP Options: which CEED benchmark problem to solve.
typedef enum {
  CEED_BP1 = 0, CEED_BP2 = 1, CEED_BP3 = 2,
  CEED_BP4 = 3, CEED_BP5 = 4, CEED_BP6 = 5
} bpType;
// String table for PetscOptionsEnum: the option values, then the enum type
// name, the enum constant prefix, and a terminating NULL.
static const char *const bpTypes[] = {"bp1","bp2","bp3","bp4","bp5","bp6",
                                      "bpType","CEED_BP",0
                                     };
148 
// BP specific data: everything that varies between the six benchmark problems.
typedef struct {
  CeedInt ncompu, qdatasize, qextra;  // solution components, qdata entries per quad point, extra quad points
  CeedQFunctionUser setupgeo, setuprhs, apply, error;  // QFunction callbacks
  const char *setupgeofname, *setuprhsfname, *applyfname, *errorfname;  // QFunction source spec strings (must match the callbacks)
  CeedEvalMode inmode, outmode;       // basis evaluation modes for the apply QFunction
  CeedQuadMode qmode;                 // quadrature rule
} bpData;
157 
158 bpData bpOptions[6] = {
159   [CEED_BP1] = {
160     .ncompu = 1,
161     .qdatasize = 1,
162     .qextra = 1,
163     .setupgeo = SetupMassGeo,
164     .setuprhs = SetupMassRhs,
165     .apply = Mass,
166     .error = Error,
167     .setupgeofname = SetupMassGeo_loc,
168     .setuprhsfname = SetupMassRhs_loc,
169     .applyfname = Mass_loc,
170     .errorfname = Error_loc,
171     .inmode = CEED_EVAL_INTERP,
172     .outmode = CEED_EVAL_INTERP,
173     .qmode = CEED_GAUSS
174   },
175   [CEED_BP2] = {
176     .ncompu = 3,
177     .qdatasize = 1,
178     .qextra = 1,
179     .setupgeo = SetupMassGeo,
180     .setuprhs = SetupMassRhs3,
181     .apply = Mass3,
182     .error = Error3,
183     .setupgeofname = SetupMassGeo_loc,
184     .setuprhsfname = SetupMassRhs3_loc,
185     .applyfname = Mass3_loc,
186     .errorfname = Error3_loc,
187     .inmode = CEED_EVAL_INTERP,
188     .outmode = CEED_EVAL_INTERP,
189     .qmode = CEED_GAUSS
190   },
191   [CEED_BP3] = {
192     .ncompu = 1,
193     .qdatasize = 6,
194     .qextra = 1,
195     .setupgeo = SetupDiffGeo,
196     .setuprhs = SetupDiffRhs,
197     .apply = Diff,
198     .error = Error,
199     .setupgeofname = SetupDiffGeo_loc,
200     .setuprhsfname = SetupDiffRhs_loc,
201     .applyfname = Diff_loc,
202     .errorfname = Error_loc,
203     .inmode = CEED_EVAL_GRAD,
204     .outmode = CEED_EVAL_GRAD,
205     .qmode = CEED_GAUSS
206   },
207   [CEED_BP4] = {
208     .ncompu = 3,
209     .qdatasize = 6,
210     .qextra = 1,
211     .setupgeo = SetupDiffGeo,
212     .setuprhs = SetupDiffRhs3,
213     .apply = Diff3,
214     .error = Error3,
215     .setupgeofname = SetupDiffGeo_loc,
216     .setuprhsfname = SetupDiffRhs3_loc,
217     .applyfname = Diff_loc,
218     .errorfname = Error3_loc,
219     .inmode = CEED_EVAL_GRAD,
220     .outmode = CEED_EVAL_GRAD,
221     .qmode = CEED_GAUSS
222   },
223   [CEED_BP5] = {
224     .ncompu = 1,
225     .qdatasize = 6,
226     .qextra = 0,
227     .setupgeo = SetupDiffGeo,
228     .setuprhs = SetupDiffRhs,
229     .apply = Diff,
230     .error = Error,
231     .setupgeofname = SetupDiffGeo_loc,
232     .setuprhsfname = SetupDiffRhs_loc,
233     .applyfname = Diff_loc,
234     .errorfname = Error_loc,
235     .inmode = CEED_EVAL_GRAD,
236     .outmode = CEED_EVAL_GRAD,
237     .qmode = CEED_GAUSS_LOBATTO
238   },
239   [CEED_BP6] = {
240     .ncompu = 3,
241     .qdatasize = 6,
242     .qextra = 0,
243     .setupgeo = SetupDiffGeo,
244     .setuprhs = SetupDiffRhs3,
245     .apply = Diff3,
246     .error = Error3,
247     .setupgeofname = SetupDiffGeo_loc,
248     .setuprhsfname = SetupDiffRhs3_loc,
249     .applyfname = Diff_loc,
250     .errorfname = Error3_loc,
251     .inmode = CEED_EVAL_GRAD,
252     .outmode = CEED_EVAL_GRAD,
253     .qmode = CEED_GAUSS_LOBATTO
254   }
255 };
256 
257 // This function uses libCEED to compute the action of the mass matrix
258 static PetscErrorCode MatMult_Mass(Mat A, Vec X, Vec Y) {
259   PetscErrorCode ierr;
260   User user;
261   PetscScalar *x, *y;
262 
263   PetscFunctionBeginUser;
264   ierr = MatShellGetContext(A, &user); CHKERRQ(ierr);
265   ierr = VecScatterBegin(user->ltog, X, user->Xloc, INSERT_VALUES,
266                          SCATTER_REVERSE); CHKERRQ(ierr);
267   ierr = VecScatterEnd(user->ltog, X, user->Xloc, INSERT_VALUES, SCATTER_REVERSE);
268   CHKERRQ(ierr);
269   ierr = VecZeroEntries(user->Yloc); CHKERRQ(ierr);
270 
271   ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
272   ierr = VecGetArray(user->Yloc, &y); CHKERRQ(ierr);
273   CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
274   CeedVectorSetArray(user->yceed, CEED_MEM_HOST, CEED_USE_POINTER, y);
275 
276   CeedOperatorApply(user->op, user->xceed, user->yceed,
277                     CEED_REQUEST_IMMEDIATE);
278   ierr = CeedVectorSyncArray(user->yceed, CEED_MEM_HOST); CHKERRQ(ierr);
279 
280   ierr = VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
281   ierr = VecRestoreArray(user->Yloc, &y); CHKERRQ(ierr);
282 
283   if (Y) {
284     ierr = VecZeroEntries(Y); CHKERRQ(ierr);
285     ierr = VecScatterBegin(user->ltog, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
286     CHKERRQ(ierr);
287     ierr = VecScatterEnd(user->ltog, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
288     CHKERRQ(ierr);
289   }
290   PetscFunctionReturn(0);
291 }
292 
// This function uses libCEED to compute the action of the Laplacian with
// Dirichlet boundary conditions. The ltog0 scatter excludes Dirichlet nodes
// from the operator application; the gtogD scatter then copies the Dirichlet
// entries of X straight into Y, making the operator the identity on those dofs.
static PetscErrorCode MatMult_Diff(Mat A, Vec X, Vec Y) {
  PetscErrorCode ierr;
  User user;
  PetscScalar *x, *y;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A, &user); CHKERRQ(ierr);

  // Global-to-local (interior/non-Dirichlet values only)
  ierr = VecScatterBegin(user->ltog0, X, user->Xloc, INSERT_VALUES,
                         SCATTER_REVERSE); CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog0, X, user->Xloc, INSERT_VALUES,
                       SCATTER_REVERSE);
  CHKERRQ(ierr);
  ierr = VecZeroEntries(user->Yloc); CHKERRQ(ierr);

  // Setup CEED vectors: alias the PETSc local arrays, no copy
  ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecGetArray(user->Yloc, &y); CHKERRQ(ierr);
  CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
  CeedVectorSetArray(user->yceed, CEED_MEM_HOST, CEED_USE_POINTER, y);

  // Apply CEED operator
  CeedOperatorApply(user->op, user->xceed, user->yceed,
                    CEED_REQUEST_IMMEDIATE);
  ierr = CeedVectorSyncArray(user->yceed, CEED_MEM_HOST); CHKERRQ(ierr);

  // Restore PETSc vectors
  ierr = VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecRestoreArray(user->Yloc, &y); CHKERRQ(ierr);

  // Local-to-global: first copy Dirichlet values of X into Y (identity on the
  // boundary), then accumulate the operator action on the remaining dofs
  ierr = VecZeroEntries(Y); CHKERRQ(ierr);
  ierr = VecScatterBegin(user->gtogD, X, Y, INSERT_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(user->gtogD, X, Y, INSERT_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterBegin(user->ltog0, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog0, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
339 
340 // This function calculates the error in the final solution
341 static PetscErrorCode ComputeErrorMax(User user, CeedOperator op_error, Vec X,
342                                       CeedVector target, PetscReal *maxerror) {
343   PetscErrorCode ierr;
344   PetscScalar *x;
345   CeedVector collocated_error;
346   CeedInt length;
347 
348   PetscFunctionBeginUser;
349   CeedVectorGetLength(target, &length);
350   CeedVectorCreate(user->ceed, length, &collocated_error);
351 
352   // Global-to-local
353   ierr = VecScatterBegin(user->ltog, X, user->Xloc, INSERT_VALUES,
354                          SCATTER_REVERSE); CHKERRQ(ierr);
355   ierr = VecScatterEnd(user->ltog, X, user->Xloc, INSERT_VALUES, SCATTER_REVERSE);
356   CHKERRQ(ierr);
357 
358   // Setup CEED vector
359   ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
360   CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
361 
362   // Apply CEED operator
363   CeedOperatorApply(op_error, user->xceed, collocated_error,
364                     CEED_REQUEST_IMMEDIATE);
365 
366   // Restore PETSc vector
367   VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
368 
369   // Reduce max error
370   *maxerror = 0;
371   const CeedScalar *e;
372   CeedVectorGetArrayRead(collocated_error, CEED_MEM_HOST, &e);
373   for (CeedInt i=0; i<length; i++) {
374     *maxerror = PetscMax(*maxerror, PetscAbsScalar(e[i]));
375   }
376   CeedVectorRestoreArrayRead(collocated_error, &e);
377   ierr = MPI_Allreduce(MPI_IN_PLACE, maxerror,
378                        1, MPIU_REAL, MPIU_MAX, user->comm); CHKERRQ(ierr);
379 
380   // Cleanup
381   CeedVectorDestroy(&collocated_error);
382 
383   PetscFunctionReturn(0);
384 }
385 
386 int main(int argc, char **argv) {
387   PetscInt ierr;
388   MPI_Comm comm;
389   char ceedresource[PETSC_MAX_PATH_LEN] = "/cpu/self";
390   double my_rt_start, my_rt, rt_min, rt_max;
391   PetscInt degree, qextra, localnodes, localelem, melem[3], mnodes[3], p[3],
392            irank[3], lnodes[3], lsize, ncompu = 1;
393   PetscScalar *r;
394   PetscBool test_mode, benchmark_mode, write_solution;
395   PetscMPIInt size, rank;
396   Vec X, Xloc, rhs, rhsloc;
397   Mat mat;
398   KSP ksp;
399   VecScatter ltog, ltog0, gtogD;
400   User user;
401   Ceed ceed;
402   CeedBasis basisx, basisu;
403   CeedElemRestriction Erestrictx, Erestrictu, Erestrictxi, Erestrictui,
404                       Erestrictqdi;
405   CeedQFunction qf_setupgeo, qf_setuprhs, qf_apply, qf_error;
406   CeedOperator op_setupgeo, op_setuprhs, op_apply, op_error;
407   CeedVector xcoord, qdata, rhsceed, target;
408   CeedInt P, Q;
409   const CeedInt dim = 3, ncompx = 3;
410   bpType bpChoice;
411 
412   ierr = PetscInitialize(&argc, &argv, NULL, help);
413   if (ierr) return ierr;
414   comm = PETSC_COMM_WORLD;
415 
416   // Read command line options
417   ierr = PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL); CHKERRQ(ierr);
418   bpChoice = CEED_BP1;
419   ierr = PetscOptionsEnum("-problem",
420                           "CEED benchmark problem to solve", NULL,
421                           bpTypes, (PetscEnum)bpChoice, (PetscEnum *)&bpChoice,
422                           NULL); CHKERRQ(ierr);
423   ncompu = bpOptions[bpChoice].ncompu;
424   test_mode = PETSC_FALSE;
425   ierr = PetscOptionsBool("-test",
426                           "Testing mode (do not print unless error is large)",
427                           NULL, test_mode, &test_mode, NULL); CHKERRQ(ierr);
428   benchmark_mode = PETSC_FALSE;
429   ierr = PetscOptionsBool("-benchmark",
430                           "Benchmarking mode (prints benchmark statistics)",
431                           NULL, benchmark_mode, &benchmark_mode, NULL);
432   CHKERRQ(ierr);
433   write_solution = PETSC_FALSE;
434   ierr = PetscOptionsBool("-write_solution",
435                           "Write solution for visualization",
436                           NULL, write_solution, &write_solution, NULL);
437   CHKERRQ(ierr);
438   degree = test_mode ? 3 : 1;
439   ierr = PetscOptionsInt("-degree", "Polynomial degree of tensor product basis",
440                          NULL, degree, &degree, NULL); CHKERRQ(ierr);
441   qextra = bpOptions[bpChoice].qextra;
442   ierr = PetscOptionsInt("-qextra", "Number of extra quadrature points",
443                          NULL, qextra, &qextra, NULL); CHKERRQ(ierr);
444   ierr = PetscOptionsString("-ceed", "CEED resource specifier",
445                             NULL, ceedresource, ceedresource,
446                             sizeof(ceedresource), NULL); CHKERRQ(ierr);
447   localnodes = 1000;
448   ierr = PetscOptionsInt("-local",
449                          "Target number of locally owned nodes per process",
450                          NULL, localnodes, &localnodes, NULL); CHKERRQ(ierr);
451   ierr = PetscOptionsEnd(); CHKERRQ(ierr);
452   P = degree + 1;
453   Q = P + qextra;
454 
455   // Determine size of process grid
456   ierr = MPI_Comm_size(comm, &size); CHKERRQ(ierr);
457   Split3(size, p, false);
458 
459   // Find a nicely composite number of elements no less than localnodes
460   for (localelem = PetscMax(1, localnodes / (degree*degree*degree)); ;
461        localelem++) {
462     Split3(localelem, melem, true);
463     if (Max3(melem) / Min3(melem) <= 2) break;
464   }
465 
466   // Find my location in the process grid
467   ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
468   for (int d=0,rankleft=rank; d<dim; d++) {
469     const int pstride[3] = {p[1] *p[2], p[2], 1};
470     irank[d] = rankleft / pstride[d];
471     rankleft -= irank[d] * pstride[d];
472   }
473 
474   GlobalNodes(p, irank, degree, melem, mnodes);
475 
476   // Setup global vector
477   ierr = VecCreate(comm, &X); CHKERRQ(ierr);
478   ierr = VecSetSizes(X, mnodes[0]*mnodes[1]*mnodes[2]*ncompu, PETSC_DECIDE);
479   CHKERRQ(ierr);
480   ierr = VecSetUp(X); CHKERRQ(ierr);
481 
482   // Set up libCEED
483   CeedInit(ceedresource, &ceed);
484 
485   // Print summary
486   CeedInt gsize;
487   ierr = VecGetSize(X, &gsize); CHKERRQ(ierr);
488   if (!test_mode) {
489     const char *usedresource;
490     CeedGetResource(ceed, &usedresource);
491     ierr = PetscPrintf(comm,
492                        "\n-- CEED Benchmark Problem %d -- libCEED + PETSc --\n"
493                        "  libCEED:\n"
494                        "    libCEED Backend                    : %s\n"
495                        "  Mesh:\n"
496                        "    Number of 1D Basis Nodes (p)       : %d\n"
497                        "    Number of 1D Quadrature Points (q) : %d\n"
498                        "    Global nodes                       : %D\n"
499                        "    Process Decomposition              : %D %D %D\n"
500                        "    Local Elements                     : %D = %D %D %D\n"
501                        "    Owned nodes                        : %D = %D %D %D\n"
502                        "    DoF per node                       : %D\n",
503                        bpChoice+1, usedresource, P, Q,  gsize/ncompu, p[0],
504                        p[1], p[2], localelem, melem[0], melem[1], melem[2],
505                        mnodes[0]*mnodes[1]*mnodes[2], mnodes[0], mnodes[1],
506                        mnodes[2], ncompu); CHKERRQ(ierr);
507   }
508 
509   {
510     lsize = 1;
511     for (int d=0; d<dim; d++) {
512       lnodes[d] = melem[d]*degree + 1;
513       lsize *= lnodes[d];
514     }
515     ierr = VecCreate(PETSC_COMM_SELF, &Xloc); CHKERRQ(ierr);
516     ierr = VecSetSizes(Xloc, lsize*ncompu, PETSC_DECIDE); CHKERRQ(ierr);
517     ierr = VecSetUp(Xloc); CHKERRQ(ierr);
518 
519     // Create local-to-global scatter
520     PetscInt *ltogind, *ltogind0, *locind, l0count;
521     IS ltogis, ltogis0, locis;
522     PetscInt gstart[2][2][2], gmnodes[2][2][2][dim];
523 
524     for (int i=0; i<2; i++) {
525       for (int j=0; j<2; j++) {
526         for (int k=0; k<2; k++) {
527           PetscInt ijkrank[3] = {irank[0]+i, irank[1]+j, irank[2]+k};
528           gstart[i][j][k] = GlobalStart(p, ijkrank, degree, melem);
529           GlobalNodes(p, ijkrank, degree, melem, gmnodes[i][j][k]);
530         }
531       }
532     }
533 
534     ierr = PetscMalloc1(lsize, &ltogind); CHKERRQ(ierr);
535     ierr = PetscMalloc1(lsize, &ltogind0); CHKERRQ(ierr);
536     ierr = PetscMalloc1(lsize, &locind); CHKERRQ(ierr);
537     l0count = 0;
538     for (PetscInt i=0,ir,ii; ir=i>=mnodes[0], ii=i-ir*mnodes[0], i<lnodes[0]; i++)
539       for (PetscInt j=0,jr,jj; jr=j>=mnodes[1], jj=j-jr*mnodes[1], j<lnodes[1]; j++)
540         for (PetscInt k=0,kr,kk; kr=k>=mnodes[2], kk=k-kr*mnodes[2], k<lnodes[2]; k++) {
541           PetscInt here = (i*lnodes[1]+j)*lnodes[2]+k;
542           ltogind[here] =
543             gstart[ir][jr][kr] + (ii*gmnodes[ir][jr][kr][1]+jj)*gmnodes[ir][jr][kr][2]+kk;
544           if ((irank[0] == 0 && i == 0)
545               || (irank[1] == 0 && j == 0)
546               || (irank[2] == 0 && k == 0)
547               || (irank[0]+1 == p[0] && i+1 == lnodes[0])
548               || (irank[1]+1 == p[1] && j+1 == lnodes[1])
549               || (irank[2]+1 == p[2] && k+1 == lnodes[2]))
550             continue;
551           ltogind0[l0count] = ltogind[here];
552           locind[l0count++] = here;
553         }
554     ierr = ISCreateBlock(comm, ncompu, lsize, ltogind, PETSC_OWN_POINTER,
555                          &ltogis); CHKERRQ(ierr);
556     ierr = VecScatterCreate(Xloc, NULL, X, ltogis, &ltog); CHKERRQ(ierr);
557     CHKERRQ(ierr);
558     ierr = ISCreateBlock(comm, ncompu, l0count, ltogind0, PETSC_OWN_POINTER,
559                          &ltogis0); CHKERRQ(ierr);
560     ierr = ISCreateBlock(comm, ncompu, l0count, locind, PETSC_OWN_POINTER,
561                          &locis); CHKERRQ(ierr);
562     ierr = VecScatterCreate(Xloc, locis, X, ltogis0, &ltog0); CHKERRQ(ierr);
563     {
564       // Create global-to-global scatter for Dirichlet values (everything not in
565       // ltogis0, which is the range of ltog0)
566       PetscInt xstart, xend, *indD, countD = 0;
567       IS isD;
568       const PetscScalar *x;
569       ierr = VecZeroEntries(Xloc); CHKERRQ(ierr);
570       ierr = VecSet(X, 1.0); CHKERRQ(ierr);
571       ierr = VecScatterBegin(ltog0, Xloc, X, INSERT_VALUES, SCATTER_FORWARD);
572       CHKERRQ(ierr);
573       ierr = VecScatterEnd(ltog0, Xloc, X, INSERT_VALUES, SCATTER_FORWARD);
574       CHKERRQ(ierr);
575       ierr = VecGetOwnershipRange(X, &xstart, &xend); CHKERRQ(ierr);
576       ierr = PetscMalloc1(xend-xstart, &indD); CHKERRQ(ierr);
577       ierr = VecGetArrayRead(X, &x); CHKERRQ(ierr);
578       for (PetscInt i=0; i<xend-xstart; i++) {
579         if (x[i] == 1.) indD[countD++] = xstart + i;
580       }
581       ierr = VecRestoreArrayRead(X, &x); CHKERRQ(ierr);
582       ierr = ISCreateGeneral(comm, countD, indD, PETSC_COPY_VALUES, &isD);
583       CHKERRQ(ierr);
584       ierr = PetscFree(indD); CHKERRQ(ierr);
585       ierr = VecScatterCreate(X, isD, X, isD, &gtogD); CHKERRQ(ierr);
586       ierr = ISDestroy(&isD); CHKERRQ(ierr);
587     }
588     ierr = ISDestroy(&ltogis); CHKERRQ(ierr);
589     ierr = ISDestroy(&ltogis0); CHKERRQ(ierr);
590     ierr = ISDestroy(&locis); CHKERRQ(ierr);
591   }
592 
593   // CEED bases
594   CeedBasisCreateTensorH1Lagrange(ceed, dim, ncompu, P, Q,
595                                   bpOptions[bpChoice].qmode, &basisu);
596   CeedBasisCreateTensorH1Lagrange(ceed, dim, ncompx, 2, Q,
597                                   bpOptions[bpChoice].qmode, &basisx);
598 
599   // CEED restrictions
600   CreateRestriction(ceed, CEED_INTERLACED, melem, P, ncompu, &Erestrictu);
601   CreateRestriction(ceed, CEED_NONINTERLACED, melem, 2, dim, &Erestrictx);
602   CeedInt nelem = melem[0]*melem[1]*melem[2];
603   CeedInt stridesu[3] = {1, Q *Q*Q, Q *Q *Q*ncompu};
604   CeedElemRestrictionCreateStrided(ceed, nelem, Q*Q*Q, nelem*Q*Q*Q, ncompu,
605                                    stridesu, &Erestrictui);
606   CeedInt stridesqd[3] = {1, Q *Q*Q, Q *Q *Q *bpOptions[bpChoice].qdatasize};
607   CeedElemRestrictionCreateStrided(ceed, nelem, Q*Q*Q, nelem*Q*Q*Q,
608                                    bpOptions[bpChoice].qdatasize, stridesqd,
609                                    &Erestrictqdi);
610   CeedInt stridesx[3] = {1, Q *Q*Q, Q *Q*Q};
611   CeedElemRestrictionCreateStrided(ceed, nelem, Q*Q*Q, nelem*Q*Q*Q, 1, stridesx,
612                                    &Erestrictxi);
613   {
614     CeedScalar *xloc;
615     CeedInt shape[3] = {melem[0]+1, melem[1]+1, melem[2]+1}, len =
616                          shape[0]*shape[1]*shape[2];
617     xloc = malloc(len*ncompx*sizeof xloc[0]);
618     for (CeedInt i=0; i<shape[0]; i++) {
619       for (CeedInt j=0; j<shape[1]; j++) {
620         for (CeedInt k=0; k<shape[2]; k++) {
621           xloc[((i*shape[1]+j)*shape[2]+k) + 0*len] = 1.*(irank[0]*melem[0]+i) /
622               (p[0]*melem[0]);
623           xloc[((i*shape[1]+j)*shape[2]+k) + 1*len] = 1.*(irank[1]*melem[1]+j) /
624               (p[1]*melem[1]);
625           xloc[((i*shape[1]+j)*shape[2]+k) + 2*len] = 1.*(irank[2]*melem[2]+k) /
626               (p[2]*melem[2]);
627         }
628       }
629     }
630     CeedVectorCreate(ceed, len*ncompx, &xcoord);
631     CeedVectorSetArray(xcoord, CEED_MEM_HOST, CEED_OWN_POINTER, xloc);
632   }
633 
634   // Create the Qfunction that builds the operator quadrature data
635   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].setupgeo,
636                               bpOptions[bpChoice].setupgeofname, &qf_setupgeo);
637   CeedQFunctionAddInput(qf_setupgeo, "dx", ncompx*dim, CEED_EVAL_GRAD);
638   CeedQFunctionAddInput(qf_setupgeo, "weight", 1, CEED_EVAL_WEIGHT);
639   CeedQFunctionAddOutput(qf_setupgeo, "qdata", bpOptions[bpChoice].qdatasize,
640                          CEED_EVAL_NONE);
641 
642   // Create the Qfunction that sets up the RHS and true solution
643   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].setuprhs,
644                               bpOptions[bpChoice].setuprhsfname, &qf_setuprhs);
645   CeedQFunctionAddInput(qf_setuprhs, "x", ncompx, CEED_EVAL_INTERP);
646   CeedQFunctionAddInput(qf_setuprhs, "dx", ncompx*dim, CEED_EVAL_GRAD);
647   CeedQFunctionAddInput(qf_setuprhs, "weight", 1, CEED_EVAL_WEIGHT);
648   CeedQFunctionAddOutput(qf_setuprhs, "true_soln", ncompu, CEED_EVAL_NONE);
649   CeedQFunctionAddOutput(qf_setuprhs, "rhs", ncompu, CEED_EVAL_INTERP);
650 
651   // Set up PDE operator
652   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].apply,
653                               bpOptions[bpChoice].applyfname, &qf_apply);
654   // Add inputs and outputs
655   CeedInt inscale = bpOptions[bpChoice].inmode==CEED_EVAL_GRAD ? 3 : 1;
656   CeedInt outscale = bpOptions[bpChoice].outmode==CEED_EVAL_GRAD ? 3 : 1;
657   CeedQFunctionAddInput(qf_apply, "u", ncompu*inscale,
658                         bpOptions[bpChoice].inmode);
659   CeedQFunctionAddInput(qf_apply, "qdata", bpOptions[bpChoice].qdatasize,
660                         CEED_EVAL_NONE);
661   CeedQFunctionAddOutput(qf_apply, "v", ncompu*outscale,
662                          bpOptions[bpChoice].outmode);
663 
664   // Create the error qfunction
665   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].error,
666                               bpOptions[bpChoice].errorfname, &qf_error);
667   CeedQFunctionAddInput(qf_error, "u", ncompu, CEED_EVAL_INTERP);
668   CeedQFunctionAddInput(qf_error, "true_soln", ncompu, CEED_EVAL_NONE);
669   CeedQFunctionAddOutput(qf_error, "error", ncompu, CEED_EVAL_NONE);
670 
671   // Create the persistent vectors that will be needed in setup
672   CeedInt nqpts;
673   CeedBasisGetNumQuadraturePoints(basisu, &nqpts);
674   CeedVectorCreate(ceed, bpOptions[bpChoice].qdatasize*nelem*nqpts, &qdata);
675   CeedVectorCreate(ceed, nelem*nqpts*ncompu, &target);
676   CeedVectorCreate(ceed, lsize*ncompu, &rhsceed);
677 
678   // Create the operator that builds the quadrature data for the ceed operator
679   CeedOperatorCreate(ceed, qf_setupgeo, CEED_QFUNCTION_NONE,
680                      CEED_QFUNCTION_NONE, &op_setupgeo);
681   CeedOperatorSetField(op_setupgeo, "dx", Erestrictx, basisx,
682                        CEED_VECTOR_ACTIVE);
683   CeedOperatorSetField(op_setupgeo, "weight", Erestrictxi, basisx,
684                        CEED_VECTOR_NONE);
685   CeedOperatorSetField(op_setupgeo, "qdata", Erestrictqdi,
686                        CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE);
687 
688   // Create the operator that builds the RHS and true solution
689   CeedOperatorCreate(ceed, qf_setuprhs, CEED_QFUNCTION_NONE,
690                      CEED_QFUNCTION_NONE, &op_setuprhs);
691   CeedOperatorSetField(op_setuprhs, "x", Erestrictx, basisx,
692                        CEED_VECTOR_ACTIVE);
693   CeedOperatorSetField(op_setuprhs, "dx", Erestrictx, basisx,
694                        CEED_VECTOR_ACTIVE);
  // (continued) Remaining fields of the RHS-setup operator: quadrature
  // weights, the analytic target solution (passive, collocated at quadrature
  // points), and the active RHS output in the solution basis.
  CeedOperatorSetField(op_setuprhs, "weight", Erestrictxi, basisx,
                       CEED_VECTOR_NONE);
  CeedOperatorSetField(op_setuprhs, "true_soln", Erestrictui,
                       CEED_BASIS_COLLOCATED, target);
  CeedOperatorSetField(op_setuprhs, "rhs", Erestrictu, basisu,
                       CEED_VECTOR_ACTIVE);

  // Create the mass or diff operator
  // qf_apply was selected earlier per -problem (mass for BP1/2, diff
  // otherwise -- assumed from the bpChoice branches below; confirm upstream).
  // "qdata" carries the precomputed geometric factors as a passive input.
  CeedOperatorCreate(ceed, qf_apply, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
                     &op_apply);
  CeedOperatorSetField(op_apply, "u", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_apply, "qdata", Erestrictqdi, CEED_BASIS_COLLOCATED,
                       qdata);
  CeedOperatorSetField(op_apply, "v", Erestrictu, basisu, CEED_VECTOR_ACTIVE);

  // Create the error operator
  // Computes the pointwise difference between the computed solution "u" and
  // the stored true solution at quadrature points; used by ComputeErrorMax.
  CeedOperatorCreate(ceed, qf_error, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
                     &op_error);
  CeedOperatorSetField(op_error, "u", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_error, "true_soln", Erestrictui,
                       CEED_BASIS_COLLOCATED, target);
  CeedOperatorSetField(op_error, "error", Erestrictui, CEED_BASIS_COLLOCATED,
                       CEED_VECTOR_ACTIVE);

  // Set up Mat
  // Populate the shell-matrix context: communicator, local-to-global
  // scatters, local work vectors, CEED work vectors, and the apply operator.
  ierr = PetscMalloc1(1, &user); CHKERRQ(ierr);
  user->comm = comm;
  user->ltog = ltog;
  // ltog0/gtogD (interior scatter and Dirichlet-boundary scatter) are only
  // needed for the BPs with essential BCs (BP3-BP6); BP1/BP2 have none.
  if (bpChoice != CEED_BP1 && bpChoice != CEED_BP2) {
    user->ltog0 = ltog0;
    user->gtogD = gtogD;
  }
  user->Xloc = Xloc;
  ierr = VecDuplicate(Xloc, &user->Yloc); CHKERRQ(ierr);
  // CEED-side mirrors of the local PETSc vectors (lsize nodes, ncompu
  // components each).
  CeedVectorCreate(ceed, lsize*ncompu, &user->xceed);
  CeedVectorCreate(ceed, lsize*ncompu, &user->yceed);
  user->op = op_apply;
  user->qdata = qdata;
  user->ceed = ceed;

  // Matrix-free operator: local row/col sizes are the owned nodes times the
  // number of components; global sizes are derived by PETSc (PETSC_DECIDE).
  ierr = MatCreateShell(comm, mnodes[0]*mnodes[1]*mnodes[2]*ncompu,
                        mnodes[0]*mnodes[1]*mnodes[2]*ncompu,
                        PETSC_DECIDE, PETSC_DECIDE, user, &mat); CHKERRQ(ierr);
  // Select the MatMult callback matching the chosen BP (mass vs diffusion).
  if (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) {
    ierr = MatShellSetOperation(mat, MATOP_MULT, (void(*)(void))MatMult_Mass);
    CHKERRQ(ierr);
  } else {
    ierr = MatShellSetOperation(mat, MATOP_MULT, (void(*)(void))MatMult_Diff);
    CHKERRQ(ierr);
  }
  ierr = MatCreateVecs(mat, &rhs, NULL); CHKERRQ(ierr);

  // Get RHS vector
  // rhsceed aliases the local PETSc array (CEED_USE_POINTER), so the
  // operator below writes directly into rhsloc's storage.
  ierr = VecDuplicate(Xloc, &rhsloc); CHKERRQ(ierr);
  ierr = VecZeroEntries(rhsloc); CHKERRQ(ierr);
  ierr = VecGetArray(rhsloc, &r); CHKERRQ(ierr);
  CeedVectorSetArray(rhsceed, CEED_MEM_HOST, CEED_USE_POINTER, r);

  // Setup qdata, rhs, and target
  // One-time applications: compute geometric factors, then the RHS (which
  // also fills the passive "target" vector with the true solution).
  CeedOperatorApply(op_setupgeo, xcoord, qdata, CEED_REQUEST_IMMEDIATE);
  CeedOperatorApply(op_setuprhs, xcoord, rhsceed, CEED_REQUEST_IMMEDIATE);
  // Ensure any device-side writes are visible in the host array before the
  // PETSc array is restored below.
  ierr = CeedVectorSyncArray(rhsceed, CEED_MEM_HOST); CHKERRQ(ierr);
  CeedVectorDestroy(&xcoord);

  // Gather RHS
  // Sum the locally assembled contributions into the global RHS
  // (ADD_VALUES accumulates shared-node contributions across ranks).
  ierr = VecRestoreArray(rhsloc, &r); CHKERRQ(ierr);
  ierr = VecZeroEntries(rhs); CHKERRQ(ierr);
  ierr = VecScatterBegin(ltog, rhsloc, rhs, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(ltog, rhsloc, rhs, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  CeedVectorDestroy(&rhsceed);

  // Krylov solver: CG with natural norm; rowsum (lumped) Jacobi is an
  // effective preconditioner for the mass-matrix BPs, otherwise run
  // unpreconditioned. Options database can override (KSPSetFromOptions).
  ierr = KSPCreate(comm, &ksp); CHKERRQ(ierr);
  {
    PC pc;
    ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
    if (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) {
      ierr = PCSetType(pc, PCJACOBI); CHKERRQ(ierr);
      ierr = PCJacobiSetType(pc, PC_JACOBI_ROWSUM); CHKERRQ(ierr);
    } else {
      ierr = PCSetType(pc, PCNONE); CHKERRQ(ierr);
    }
    ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr);
    ierr = KSPSetNormType(ksp, KSP_NORM_NATURAL); CHKERRQ(ierr);
    ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT,
                            PETSC_DEFAULT); CHKERRQ(ierr);
  }
  ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp, mat, mat); CHKERRQ(ierr);
  // First run, if benchmarking
  // Warm-up: solve once with maxits=1 to time a single iteration, then pick
  // maxits for the timed run so it finishes in a reasonable wall time.
  if (benchmark_mode) {
    ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1);
    CHKERRQ(ierr);
    my_rt_start = MPI_Wtime();
    ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
    my_rt = MPI_Wtime() - my_rt_start;
    // Agree on the fastest rank's timing so all ranks choose the same maxits.
    ierr = MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm);
    CHKERRQ(ierr);
    // Set maxits based on first iteration timing
    if (my_rt > 0.02) {
      ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 5);
      CHKERRQ(ierr);
    } else {
      ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20);
      CHKERRQ(ierr);
    }
  }
  // Timed solve
  // Barrier so the timing window starts simultaneously on all ranks.
  ierr = PetscBarrier((PetscObject)ksp); CHKERRQ(ierr);
  my_rt_start = MPI_Wtime();
  ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
  my_rt = MPI_Wtime() - my_rt_start;
  {
    KSPType ksptype;
    KSPConvergedReason reason;
    PetscReal rnorm;
    PetscInt its;
    ierr = KSPGetType(ksp, &ksptype); CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr);
    ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
    ierr = KSPGetResidualNorm(ksp, &rnorm); CHKERRQ(ierr);
    // Report solver statistics; in test mode only print on failure
    // (divergence or residual above the regression threshold).
    if (!test_mode || reason < 0 || rnorm > 1e-8) {
      ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm);
      CHKERRQ(ierr);
      ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm);
      CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  KSP:\n"
                         "    KSP Type                           : %s\n"
                         "    KSP Convergence                    : %s\n"
                         "    Total KSP Iterations               : %D\n"
                         "    Final rnorm                        : %e\n",
                         ksptype, KSPConvergedReasons[reason], its,
                         (double)rnorm); CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  Performance:\n"
                         "    CG Solve Time                      : %g (%g) sec\n"
                         "    DoFs/Sec in CG                     : %g (%g) million\n",
                         rt_max, rt_min, 1e-6*gsize*its/rt_max,
                         1e-6*gsize*its/rt_min); CHKERRQ(ierr);
    }
    // Benchmark mode always reports throughput (unless running as a test).
    if (benchmark_mode && (!test_mode)) {
      ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm);
      CHKERRQ(ierr);
      ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm);
      CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  Performance:\n"
                         "    CG Solve Time                      : %g (%g) sec\n"
                         "    DoFs/Sec in CG                     : %g (%g) million\n",
                         rt_max, rt_min, 1e-6*gsize*its/rt_max,
                         1e-6*gsize*its/rt_min); CHKERRQ(ierr);
    }
  }

  // Discretization error check: max pointwise error vs the stored true
  // solution; printed unless running as a passing test.
  {
    PetscReal maxerror;
    ierr = ComputeErrorMax(user, op_error, X, target, &maxerror); CHKERRQ(ierr);
    PetscReal tol = 5e-2;
    if (!test_mode || maxerror > tol) {
      ierr = PetscPrintf(comm,
                         "    Pointwise Error (max)              : %e\n",
                         (double)maxerror); CHKERRQ(ierr);
    }
  }

  // Optionally dump the solution in legacy VTK format for visualization.
  if (write_solution) {
    PetscViewer vtkviewersoln;

    ierr = PetscViewerCreate(comm, &vtkviewersoln); CHKERRQ(ierr);
    ierr = PetscViewerSetType(vtkviewersoln, PETSCVIEWERVTK); CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(vtkviewersoln, "solution.vtk"); CHKERRQ(ierr);
    ierr = VecView(X, vtkviewersoln); CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&vtkviewersoln); CHKERRQ(ierr);
  }

  // Cleanup: PETSc objects first, then CEED objects, then the user context.
  ierr = VecDestroy(&rhs); CHKERRQ(ierr);
  ierr = VecDestroy(&rhsloc); CHKERRQ(ierr);
  ierr = VecDestroy(&X); CHKERRQ(ierr);
  ierr = VecDestroy(&user->Xloc); CHKERRQ(ierr);
  ierr = VecDestroy(&user->Yloc); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ltog); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ltog0); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&gtogD); CHKERRQ(ierr);
  ierr = MatDestroy(&mat); CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp); CHKERRQ(ierr);

  CeedVectorDestroy(&user->xceed);
  CeedVectorDestroy(&user->yceed);
  CeedVectorDestroy(&user->qdata);
  CeedVectorDestroy(&target);
  CeedOperatorDestroy(&op_setupgeo);
  CeedOperatorDestroy(&op_setuprhs);
  CeedOperatorDestroy(&op_apply);
  CeedOperatorDestroy(&op_error);
  CeedElemRestrictionDestroy(&Erestrictu);
  CeedElemRestrictionDestroy(&Erestrictx);
  CeedElemRestrictionDestroy(&Erestrictui);
  CeedElemRestrictionDestroy(&Erestrictxi);
  CeedElemRestrictionDestroy(&Erestrictqdi);
  CeedQFunctionDestroy(&qf_setupgeo);
  CeedQFunctionDestroy(&qf_setuprhs);
  CeedQFunctionDestroy(&qf_apply);
  CeedQFunctionDestroy(&qf_error);
  CeedBasisDestroy(&basisu);
  CeedBasisDestroy(&basisx);
  CeedDestroy(&ceed);
  ierr = PetscFree(user); CHKERRQ(ierr);
  // PetscFinalize returns the final error code for main.
  return PetscFinalize();
}
906