xref: /libCEED/examples/petsc/bpsraw.c (revision 61dbc9d25335c1a7855bdb901ed4d560d51eee1f)
1 // Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
2 // the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
3 // reserved. See files LICENSE and NOTICE for details.
4 //
5 // This file is part of CEED, a collection of benchmarks, miniapps, software
6 // libraries and APIs for efficient high-order finite element and spectral
7 // element discretizations for exascale applications. For more information and
8 // source code availability see http://github.com/ceed.
9 //
10 // The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
11 // a collaborative effort of two U.S. Department of Energy organizations (Office
12 // of Science and the National Nuclear Security Administration) responsible for
13 // the planning and preparation of a capable exascale ecosystem, including
14 // software, applications, hardware, advanced system engineering and early
15 // testbed platforms, in support of the nation's exascale computing imperative.
16 
17 //                        libCEED + PETSc Example: CEED BPs
18 //
19 // This example demonstrates a simple usage of libCEED with PETSc to solve the
20 // CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
21 //
22 // The code is intentionally "raw", using only low-level communication
23 // primitives.
24 //
25 // Build with:
26 //
27 //     make bpsraw [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
28 //
29 // Sample runs:
30 //
31 //     ./bpsraw -problem bp1
32 //     ./bpsraw -problem bp2 -ceed /cpu/self
33 //     ./bpsraw -problem bp3 -ceed /gpu/occa
34 //     ./bpsraw -problem bp4 -ceed /cpu/occa
35 //     ./bpsraw -problem bp5 -ceed /omp/occa
36 //     ./bpsraw -problem bp6 -ceed /ocl/occa
37 //
38 //TESTARGS -ceed {ceed_resource} -test -problem bp2 -degree 5 -qextra 5
39 
/// @file
/// CEED BPs example using PETSc
/// See bps.c for an implementation using DMPlex unstructured grids.
// Help string printed by PetscInitialize when run with -help
const char help[] = "Solve CEED BPs using PETSc\n";
44 
45 #include <stdbool.h>
46 #include <string.h>
47 #include <petscksp.h>
48 #include <ceed.h>
49 #include "qfunctions/bps/common.h"
50 #include "qfunctions/bps/bp1.h"
51 #include "qfunctions/bps/bp2.h"
52 #include "qfunctions/bps/bp3.h"
53 #include "qfunctions/bps/bp4.h"
54 
55 static void Split3(PetscInt size, PetscInt m[3], bool reverse) {
56   for (PetscInt d=0,sizeleft=size; d<3; d++) {
57     PetscInt try = (PetscInt)PetscCeilReal(PetscPowReal(sizeleft, 1./(3 - d)));
58     while (try * (sizeleft / try) != sizeleft) try++;
59     m[reverse ? 2-d : d] = try;
60     sizeleft /= try;
61   }
62 }
63 
64 static PetscInt Max3(const PetscInt a[3]) {
65   return PetscMax(a[0], PetscMax(a[1], a[2]));
66 }
67 static PetscInt Min3(const PetscInt a[3]) {
68   return PetscMin(a[0], PetscMin(a[1], a[2]));
69 }
70 static void GlobalNodes(const PetscInt p[3], const PetscInt irank[3],
71                         PetscInt degree, const PetscInt melem[3],
72                         PetscInt mnodes[3]) {
73   for (int d=0; d<3; d++)
74     mnodes[d] = degree*melem[d] + (irank[d] == p[d]-1);
75 }
76 static PetscInt GlobalStart(const PetscInt p[3], const PetscInt irank[3],
77                             PetscInt degree, const PetscInt melem[3]) {
78   PetscInt start = 0;
79   // Dumb brute-force is easier to read
80   for (PetscInt i=0; i<p[0]; i++) {
81     for (PetscInt j=0; j<p[1]; j++) {
82       for (PetscInt k=0; k<p[2]; k++) {
83         PetscInt mnodes[3], ijkrank[] = {i,j,k};
84         if (i == irank[0] && j == irank[1] && k == irank[2]) return start;
85         GlobalNodes(p, ijkrank, degree, melem, mnodes);
86         start += mnodes[0] * mnodes[1] * mnodes[2];
87       }
88     }
89   }
90   return -1;
91 }
92 static int CreateRestriction(Ceed ceed, CeedInterlaceMode imode,
93                              const CeedInt melem[3], CeedInt P, CeedInt ncomp,
94                              CeedElemRestriction *Erestrict) {
95   const PetscInt nelem = melem[0]*melem[1]*melem[2];
96   PetscInt mnodes[3], *idx, *idxp;
97 
98   // Get indicies
99   for (int d=0; d<3; d++) mnodes[d] = melem[d]*(P-1) + 1;
100   idxp = idx = malloc(nelem*P*P*P*sizeof idx[0]);
101   for (CeedInt i=0; i<melem[0]; i++)
102     for (CeedInt j=0; j<melem[1]; j++)
103       for (CeedInt k=0; k<melem[2]; k++,idxp += P*P*P)
104         for (CeedInt ii=0; ii<P; ii++)
105           for (CeedInt jj=0; jj<P; jj++)
106             for (CeedInt kk=0; kk<P; kk++) {
107               if (0) { // This is the C-style (i,j,k) ordering that I prefer
108                 idxp[(ii*P+jj)*P+kk] = (((i*(P-1)+ii)*mnodes[1]
109                                          + (j*(P-1)+jj))*mnodes[2]
110                                         + (k*(P-1)+kk));
111               } else { // (k,j,i) ordering for consistency with MFEM example
112                 idxp[ii+P*(jj+P*kk)] = (((i*(P-1)+ii)*mnodes[1]
113                                          + (j*(P-1)+jj))*mnodes[2]
114                                         + (k*(P-1)+kk));
115               }
116             }
117 
118   // Setup CEED restriction
119   CeedElemRestrictionCreate(ceed, imode, nelem, P*P*P,
120                             mnodes[0]*mnodes[1]*mnodes[2], ncomp,
121                             CEED_MEM_HOST, CEED_OWN_POINTER, idx, Erestrict);
122 
123   PetscFunctionReturn(0);
124 }
125 
// Data for PETSc
// Application context shared by the shell-matrix callbacks (MatMult_Mass,
// MatMult_Diff) and ComputeErrorMax.  Owns the process-local work vectors
// and the libCEED objects applied on each operator evaluation.
typedef struct User_ *User;
struct User_ {
  MPI_Comm comm;
  VecScatter ltog;              // Scatter for all entries
  VecScatter ltog0;             // Skip Dirichlet values
  VecScatter gtogD;             // global-to-global; only Dirichlet values
  Vec Xloc, Yloc;               // Process-local (unassembled) input/output vectors
  CeedVector xceed, yceed;      // CEED wrappers around Xloc/Yloc host arrays
  CeedOperator op;              // Operator applied in MatMult callbacks
  CeedVector qdata;             // Quadrature-point data held for op's lifetime
  Ceed ceed;
};
139 
// BP Options
// CEED benchmark problem selector (see http://ceed.exascaleproject.org/bps)
typedef enum {
  CEED_BP1 = 0, CEED_BP2 = 1, CEED_BP3 = 2,
  CEED_BP4 = 3, CEED_BP5 = 4, CEED_BP6 = 5
} bpType;
// String table for PetscOptionsEnum: the option values, then the enum type
// name and value prefix, terminated by a null entry (PETSc convention).
static const char *const bpTypes[] = {"bp1","bp2","bp3","bp4","bp5","bp6",
                                      "bpType","CEED_BP",0
                                     };
148 
// BP specific data
// Per-problem configuration: sizes, the QFunctions used for geometric setup,
// RHS setup, operator application and error evaluation (with their source
// locations for JIT backends), and the basis evaluation/quadrature modes.
typedef struct {
  CeedInt ncompu, qdatasize, qextra;  // solution components, qdata per quad point, extra quad points
  CeedQFunctionUser setupgeo, setuprhs, apply, error;
  const char *setupgeofname, *setuprhsfname, *applyfname, *errorfname;
  CeedEvalMode inmode, outmode;       // eval modes of the apply operator's active input/output
  CeedQuadMode qmode;                 // quadrature rule for the solution basis
} bpData;
157 
158 bpData bpOptions[6] = {
159   [CEED_BP1] = {
160     .ncompu = 1,
161     .qdatasize = 1,
162     .qextra = 1,
163     .setupgeo = SetupMassGeo,
164     .setuprhs = SetupMassRhs,
165     .apply = Mass,
166     .error = Error,
167     .setupgeofname = SetupMassGeo_loc,
168     .setuprhsfname = SetupMassRhs_loc,
169     .applyfname = Mass_loc,
170     .errorfname = Error_loc,
171     .inmode = CEED_EVAL_INTERP,
172     .outmode = CEED_EVAL_INTERP,
173     .qmode = CEED_GAUSS
174   },
175   [CEED_BP2] = {
176     .ncompu = 3,
177     .qdatasize = 1,
178     .qextra = 1,
179     .setupgeo = SetupMassGeo,
180     .setuprhs = SetupMassRhs3,
181     .apply = Mass3,
182     .error = Error3,
183     .setupgeofname = SetupMassGeo_loc,
184     .setuprhsfname = SetupMassRhs3_loc,
185     .applyfname = Mass3_loc,
186     .errorfname = Error3_loc,
187     .inmode = CEED_EVAL_INTERP,
188     .outmode = CEED_EVAL_INTERP,
189     .qmode = CEED_GAUSS
190   },
191   [CEED_BP3] = {
192     .ncompu = 1,
193     .qdatasize = 6,
194     .qextra = 1,
195     .setupgeo = SetupDiffGeo,
196     .setuprhs = SetupDiffRhs,
197     .apply = Diff,
198     .error = Error,
199     .setupgeofname = SetupDiffGeo_loc,
200     .setuprhsfname = SetupDiffRhs_loc,
201     .applyfname = Diff_loc,
202     .errorfname = Error_loc,
203     .inmode = CEED_EVAL_GRAD,
204     .outmode = CEED_EVAL_GRAD,
205     .qmode = CEED_GAUSS
206   },
207   [CEED_BP4] = {
208     .ncompu = 3,
209     .qdatasize = 6,
210     .qextra = 1,
211     .setupgeo = SetupDiffGeo,
212     .setuprhs = SetupDiffRhs3,
213     .apply = Diff3,
214     .error = Error3,
215     .setupgeofname = SetupDiffGeo_loc,
216     .setuprhsfname = SetupDiffRhs3_loc,
217     .applyfname = Diff_loc,
218     .errorfname = Error3_loc,
219     .inmode = CEED_EVAL_GRAD,
220     .outmode = CEED_EVAL_GRAD,
221     .qmode = CEED_GAUSS
222   },
223   [CEED_BP5] = {
224     .ncompu = 1,
225     .qdatasize = 6,
226     .qextra = 0,
227     .setupgeo = SetupDiffGeo,
228     .setuprhs = SetupDiffRhs,
229     .apply = Diff,
230     .error = Error,
231     .setupgeofname = SetupDiffGeo_loc,
232     .setuprhsfname = SetupDiffRhs_loc,
233     .applyfname = Diff_loc,
234     .errorfname = Error_loc,
235     .inmode = CEED_EVAL_GRAD,
236     .outmode = CEED_EVAL_GRAD,
237     .qmode = CEED_GAUSS_LOBATTO
238   },
239   [CEED_BP6] = {
240     .ncompu = 3,
241     .qdatasize = 6,
242     .qextra = 0,
243     .setupgeo = SetupDiffGeo,
244     .setuprhs = SetupDiffRhs3,
245     .apply = Diff3,
246     .error = Error3,
247     .setupgeofname = SetupDiffGeo_loc,
248     .setuprhsfname = SetupDiffRhs3_loc,
249     .applyfname = Diff_loc,
250     .errorfname = Error3_loc,
251     .inmode = CEED_EVAL_GRAD,
252     .outmode = CEED_EVAL_GRAD,
253     .qmode = CEED_GAUSS_LOBATTO
254   }
255 };
256 
257 // This function uses libCEED to compute the action of the mass matrix
258 static PetscErrorCode MatMult_Mass(Mat A, Vec X, Vec Y) {
259   PetscErrorCode ierr;
260   User user;
261   PetscScalar *x, *y;
262 
263   PetscFunctionBeginUser;
264   ierr = MatShellGetContext(A, &user); CHKERRQ(ierr);
265   ierr = VecScatterBegin(user->ltog, X, user->Xloc, INSERT_VALUES,
266                          SCATTER_REVERSE); CHKERRQ(ierr);
267   ierr = VecScatterEnd(user->ltog, X, user->Xloc, INSERT_VALUES, SCATTER_REVERSE);
268   CHKERRQ(ierr);
269   ierr = VecZeroEntries(user->Yloc); CHKERRQ(ierr);
270 
271   ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
272   ierr = VecGetArray(user->Yloc, &y); CHKERRQ(ierr);
273   CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
274   CeedVectorSetArray(user->yceed, CEED_MEM_HOST, CEED_USE_POINTER, y);
275 
276   CeedOperatorApply(user->op, user->xceed, user->yceed,
277                     CEED_REQUEST_IMMEDIATE);
278   ierr = CeedVectorSyncArray(user->yceed, CEED_MEM_HOST); CHKERRQ(ierr);
279 
280   ierr = VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
281   ierr = VecRestoreArray(user->Yloc, &y); CHKERRQ(ierr);
282 
283   if (Y) {
284     ierr = VecZeroEntries(Y); CHKERRQ(ierr);
285     ierr = VecScatterBegin(user->ltog, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
286     CHKERRQ(ierr);
287     ierr = VecScatterEnd(user->ltog, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
288     CHKERRQ(ierr);
289   }
290   PetscFunctionReturn(0);
291 }
292 
// This function uses libCEED to compute the action of the Laplacian with
// Dirichlet boundary conditions.  Interior values come from the CEED
// operator applied through ltog0 (which skips Dirichlet nodes); Dirichlet
// rows act as identity via the gtogD global-to-global copy, so the shell
// matrix stays symmetric positive definite for CG.
static PetscErrorCode MatMult_Diff(Mat A, Vec X, Vec Y) {
  PetscErrorCode ierr;
  User user;
  PetscScalar *x, *y;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A, &user); CHKERRQ(ierr);

  // Global-to-local (ltog0 leaves Dirichlet slots of Xloc untouched/zero)
  ierr = VecScatterBegin(user->ltog0, X, user->Xloc, INSERT_VALUES,
                         SCATTER_REVERSE); CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog0, X, user->Xloc, INSERT_VALUES,
                       SCATTER_REVERSE);
  CHKERRQ(ierr);
  ierr = VecZeroEntries(user->Yloc); CHKERRQ(ierr);

  // Setup CEED vectors (zero-copy views of the PETSc host arrays)
  ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecGetArray(user->Yloc, &y); CHKERRQ(ierr);
  CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
  CeedVectorSetArray(user->yceed, CEED_MEM_HOST, CEED_USE_POINTER, y);

  // Apply CEED operator
  CeedOperatorApply(user->op, user->xceed, user->yceed,
                    CEED_REQUEST_IMMEDIATE);
  ierr = CeedVectorSyncArray(user->yceed, CEED_MEM_HOST); CHKERRQ(ierr);

  // Restore PETSc vectors
  ierr = VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecRestoreArray(user->Yloc, &y); CHKERRQ(ierr);

  // Local-to-global: first copy Dirichlet entries of X straight into Y
  // (identity on boundary rows), then accumulate interior contributions
  ierr = VecZeroEntries(Y); CHKERRQ(ierr);
  ierr = VecScatterBegin(user->gtogD, X, Y, INSERT_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(user->gtogD, X, Y, INSERT_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterBegin(user->ltog0, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog0, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
339 
340 // This function calculates the error in the final solution
341 static PetscErrorCode ComputeErrorMax(User user, CeedOperator op_error, Vec X,
342                                       CeedVector target, PetscReal *maxerror) {
343   PetscErrorCode ierr;
344   PetscScalar *x;
345   CeedVector collocated_error;
346   CeedInt length;
347 
348   PetscFunctionBeginUser;
349   CeedVectorGetLength(target, &length);
350   CeedVectorCreate(user->ceed, length, &collocated_error);
351 
352   // Global-to-local
353   ierr = VecScatterBegin(user->ltog, X, user->Xloc, INSERT_VALUES,
354                          SCATTER_REVERSE); CHKERRQ(ierr);
355   ierr = VecScatterEnd(user->ltog, X, user->Xloc, INSERT_VALUES, SCATTER_REVERSE);
356   CHKERRQ(ierr);
357 
358   // Setup CEED vector
359   ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
360   CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
361 
362   // Apply CEED operator
363   CeedOperatorApply(op_error, user->xceed, collocated_error,
364                     CEED_REQUEST_IMMEDIATE);
365 
366   // Restore PETSc vector
367   VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
368 
369   // Reduce max error
370   *maxerror = 0;
371   const CeedScalar *e;
372   CeedVectorGetArrayRead(collocated_error, CEED_MEM_HOST, &e);
373   for (CeedInt i=0; i<length; i++) {
374     *maxerror = PetscMax(*maxerror, PetscAbsScalar(e[i]));
375   }
376   CeedVectorRestoreArrayRead(collocated_error, &e);
377   ierr = MPI_Allreduce(MPI_IN_PLACE, maxerror,
378                        1, MPIU_REAL, MPIU_MAX, user->comm); CHKERRQ(ierr);
379 
380   // Cleanup
381   CeedVectorDestroy(&collocated_error);
382 
383   PetscFunctionReturn(0);
384 }
385 
386 int main(int argc, char **argv) {
387   PetscInt ierr;
388   MPI_Comm comm;
389   char ceedresource[PETSC_MAX_PATH_LEN] = "/cpu/self";
390   double my_rt_start, my_rt, rt_min, rt_max;
391   PetscInt degree, qextra, localnodes, localelem, melem[3], mnodes[3], p[3],
392            irank[3], lnodes[3], lsize, ncompu = 1;
393   PetscScalar *r;
394   PetscBool test_mode, benchmark_mode, write_solution;
395   PetscMPIInt size, rank;
396   Vec X, Xloc, rhs, rhsloc;
397   Mat mat;
398   KSP ksp;
399   VecScatter ltog, ltog0, gtogD;
400   User user;
401   Ceed ceed;
402   CeedBasis basisx, basisu;
403   CeedInterlaceMode imodeceed = CEED_NONINTERLACED,
404                     imodepetsc = CEED_INTERLACED;
405   CeedElemRestriction Erestrictx, Erestrictu, Erestrictxi, Erestrictui,
406                       Erestrictqdi;
407   CeedQFunction qf_setupgeo, qf_setuprhs, qf_apply, qf_error;
408   CeedOperator op_setupgeo, op_setuprhs, op_apply, op_error;
409   CeedVector xcoord, qdata, rhsceed, target;
410   CeedInt P, Q;
411   const CeedInt dim = 3, ncompx = 3;
412   bpType bpChoice;
413 
414   ierr = PetscInitialize(&argc, &argv, NULL, help);
415   if (ierr) return ierr;
416   comm = PETSC_COMM_WORLD;
417 
418   // Read command line options
419   ierr = PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL); CHKERRQ(ierr);
420   bpChoice = CEED_BP1;
421   ierr = PetscOptionsEnum("-problem",
422                           "CEED benchmark problem to solve", NULL,
423                           bpTypes, (PetscEnum)bpChoice, (PetscEnum *)&bpChoice,
424                           NULL); CHKERRQ(ierr);
425   ncompu = bpOptions[bpChoice].ncompu;
426   test_mode = PETSC_FALSE;
427   ierr = PetscOptionsBool("-test",
428                           "Testing mode (do not print unless error is large)",
429                           NULL, test_mode, &test_mode, NULL); CHKERRQ(ierr);
430   benchmark_mode = PETSC_FALSE;
431   ierr = PetscOptionsBool("-benchmark",
432                           "Benchmarking mode (prints benchmark statistics)",
433                           NULL, benchmark_mode, &benchmark_mode, NULL);
434   CHKERRQ(ierr);
435   write_solution = PETSC_FALSE;
436   ierr = PetscOptionsBool("-write_solution",
437                           "Write solution for visualization",
438                           NULL, write_solution, &write_solution, NULL);
439   CHKERRQ(ierr);
440   degree = test_mode ? 3 : 1;
441   ierr = PetscOptionsInt("-degree", "Polynomial degree of tensor product basis",
442                          NULL, degree, &degree, NULL); CHKERRQ(ierr);
443   qextra = bpOptions[bpChoice].qextra;
444   ierr = PetscOptionsInt("-qextra", "Number of extra quadrature points",
445                          NULL, qextra, &qextra, NULL); CHKERRQ(ierr);
446   ierr = PetscOptionsString("-ceed", "CEED resource specifier",
447                             NULL, ceedresource, ceedresource,
448                             sizeof(ceedresource), NULL); CHKERRQ(ierr);
449   localnodes = 1000;
450   ierr = PetscOptionsInt("-local",
451                          "Target number of locally owned nodes per process",
452                          NULL, localnodes, &localnodes, NULL); CHKERRQ(ierr);
453   ierr = PetscOptionsEnd(); CHKERRQ(ierr);
454   P = degree + 1;
455   Q = P + qextra;
456 
457   // Determine size of process grid
458   ierr = MPI_Comm_size(comm, &size); CHKERRQ(ierr);
459   Split3(size, p, false);
460 
461   // Find a nicely composite number of elements no less than localnodes
462   for (localelem = PetscMax(1, localnodes / (degree*degree*degree)); ;
463        localelem++) {
464     Split3(localelem, melem, true);
465     if (Max3(melem) / Min3(melem) <= 2) break;
466   }
467 
468   // Find my location in the process grid
469   ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
470   for (int d=0,rankleft=rank; d<dim; d++) {
471     const int pstride[3] = {p[1] *p[2], p[2], 1};
472     irank[d] = rankleft / pstride[d];
473     rankleft -= irank[d] * pstride[d];
474   }
475 
476   GlobalNodes(p, irank, degree, melem, mnodes);
477 
478   // Setup global vector
479   ierr = VecCreate(comm, &X); CHKERRQ(ierr);
480   ierr = VecSetSizes(X, mnodes[0]*mnodes[1]*mnodes[2]*ncompu, PETSC_DECIDE);
481   CHKERRQ(ierr);
482   ierr = VecSetUp(X); CHKERRQ(ierr);
483 
484   // Set up libCEED
485   CeedInit(ceedresource, &ceed);
486 
487   // Print summary
488   CeedInt gsize;
489   ierr = VecGetSize(X, &gsize); CHKERRQ(ierr);
490   if (!test_mode) {
491     const char *usedresource;
492     CeedGetResource(ceed, &usedresource);
493     ierr = PetscPrintf(comm,
494                        "\n-- CEED Benchmark Problem %d -- libCEED + PETSc --\n"
495                        "  libCEED:\n"
496                        "    libCEED Backend                    : %s\n"
497                        "  Mesh:\n"
498                        "    Number of 1D Basis Nodes (p)       : %d\n"
499                        "    Number of 1D Quadrature Points (q) : %d\n"
500                        "    Global nodes                       : %D\n"
501                        "    Process Decomposition              : %D %D %D\n"
502                        "    Local Elements                     : %D = %D %D %D\n"
503                        "    Owned nodes                        : %D = %D %D %D\n"
504                        "    DoF per node                       : %D\n",
505                        bpChoice+1, usedresource, P, Q,  gsize/ncompu, p[0],
506                        p[1], p[2], localelem, melem[0], melem[1], melem[2],
507                        mnodes[0]*mnodes[1]*mnodes[2], mnodes[0], mnodes[1],
508                        mnodes[2], ncompu); CHKERRQ(ierr);
509   }
510 
511   {
512     lsize = 1;
513     for (int d=0; d<dim; d++) {
514       lnodes[d] = melem[d]*degree + 1;
515       lsize *= lnodes[d];
516     }
517     ierr = VecCreate(PETSC_COMM_SELF, &Xloc); CHKERRQ(ierr);
518     ierr = VecSetSizes(Xloc, lsize*ncompu, PETSC_DECIDE); CHKERRQ(ierr);
519     ierr = VecSetUp(Xloc); CHKERRQ(ierr);
520 
521     // Create local-to-global scatter
522     PetscInt *ltogind, *ltogind0, *locind, l0count;
523     IS ltogis, ltogis0, locis;
524     PetscInt gstart[2][2][2], gmnodes[2][2][2][dim];
525 
526     for (int i=0; i<2; i++) {
527       for (int j=0; j<2; j++) {
528         for (int k=0; k<2; k++) {
529           PetscInt ijkrank[3] = {irank[0]+i, irank[1]+j, irank[2]+k};
530           gstart[i][j][k] = GlobalStart(p, ijkrank, degree, melem);
531           GlobalNodes(p, ijkrank, degree, melem, gmnodes[i][j][k]);
532         }
533       }
534     }
535 
536     ierr = PetscMalloc1(lsize, &ltogind); CHKERRQ(ierr);
537     ierr = PetscMalloc1(lsize, &ltogind0); CHKERRQ(ierr);
538     ierr = PetscMalloc1(lsize, &locind); CHKERRQ(ierr);
539     l0count = 0;
540     for (PetscInt i=0,ir,ii; ir=i>=mnodes[0], ii=i-ir*mnodes[0], i<lnodes[0]; i++)
541       for (PetscInt j=0,jr,jj; jr=j>=mnodes[1], jj=j-jr*mnodes[1], j<lnodes[1]; j++)
542         for (PetscInt k=0,kr,kk; kr=k>=mnodes[2], kk=k-kr*mnodes[2], k<lnodes[2]; k++) {
543           PetscInt here = (i*lnodes[1]+j)*lnodes[2]+k;
544           ltogind[here] =
545             gstart[ir][jr][kr] + (ii*gmnodes[ir][jr][kr][1]+jj)*gmnodes[ir][jr][kr][2]+kk;
546           if ((irank[0] == 0 && i == 0)
547               || (irank[1] == 0 && j == 0)
548               || (irank[2] == 0 && k == 0)
549               || (irank[0]+1 == p[0] && i+1 == lnodes[0])
550               || (irank[1]+1 == p[1] && j+1 == lnodes[1])
551               || (irank[2]+1 == p[2] && k+1 == lnodes[2]))
552             continue;
553           ltogind0[l0count] = ltogind[here];
554           locind[l0count++] = here;
555         }
556     ierr = ISCreateBlock(comm, ncompu, lsize, ltogind, PETSC_OWN_POINTER,
557                          &ltogis); CHKERRQ(ierr);
558     ierr = VecScatterCreate(Xloc, NULL, X, ltogis, &ltog); CHKERRQ(ierr);
559     CHKERRQ(ierr);
560     ierr = ISCreateBlock(comm, ncompu, l0count, ltogind0, PETSC_OWN_POINTER,
561                          &ltogis0); CHKERRQ(ierr);
562     ierr = ISCreateBlock(comm, ncompu, l0count, locind, PETSC_OWN_POINTER,
563                          &locis); CHKERRQ(ierr);
564     ierr = VecScatterCreate(Xloc, locis, X, ltogis0, &ltog0); CHKERRQ(ierr);
565     {
566       // Create global-to-global scatter for Dirichlet values (everything not in
567       // ltogis0, which is the range of ltog0)
568       PetscInt xstart, xend, *indD, countD = 0;
569       IS isD;
570       const PetscScalar *x;
571       ierr = VecZeroEntries(Xloc); CHKERRQ(ierr);
572       ierr = VecSet(X, 1.0); CHKERRQ(ierr);
573       ierr = VecScatterBegin(ltog0, Xloc, X, INSERT_VALUES, SCATTER_FORWARD);
574       CHKERRQ(ierr);
575       ierr = VecScatterEnd(ltog0, Xloc, X, INSERT_VALUES, SCATTER_FORWARD);
576       CHKERRQ(ierr);
577       ierr = VecGetOwnershipRange(X, &xstart, &xend); CHKERRQ(ierr);
578       ierr = PetscMalloc1(xend-xstart, &indD); CHKERRQ(ierr);
579       ierr = VecGetArrayRead(X, &x); CHKERRQ(ierr);
580       for (PetscInt i=0; i<xend-xstart; i++) {
581         if (x[i] == 1.) indD[countD++] = xstart + i;
582       }
583       ierr = VecRestoreArrayRead(X, &x); CHKERRQ(ierr);
584       ierr = ISCreateGeneral(comm, countD, indD, PETSC_COPY_VALUES, &isD);
585       CHKERRQ(ierr);
586       ierr = PetscFree(indD); CHKERRQ(ierr);
587       ierr = VecScatterCreate(X, isD, X, isD, &gtogD); CHKERRQ(ierr);
588       ierr = ISDestroy(&isD); CHKERRQ(ierr);
589     }
590     ierr = ISDestroy(&ltogis); CHKERRQ(ierr);
591     ierr = ISDestroy(&ltogis0); CHKERRQ(ierr);
592     ierr = ISDestroy(&locis); CHKERRQ(ierr);
593   }
594 
595   // CEED bases
596   CeedBasisCreateTensorH1Lagrange(ceed, dim, ncompu, P, Q,
597                                   bpOptions[bpChoice].qmode, &basisu);
598   CeedBasisCreateTensorH1Lagrange(ceed, dim, ncompx, 2, Q,
599                                   bpOptions[bpChoice].qmode, &basisx);
600 
601   // CEED restrictions
602   CreateRestriction(ceed, imodepetsc, melem, P, ncompu, &Erestrictu);
603   CreateRestriction(ceed, imodeceed, melem, 2, dim, &Erestrictx);
604   CeedInt nelem = melem[0]*melem[1]*melem[2];
605   CeedElemRestrictionCreateIdentity(ceed, imodeceed, nelem, Q*Q*Q, nelem*Q*Q*Q,
606                                     ncompu, &Erestrictui);
607   CeedElemRestrictionCreateIdentity(ceed, imodeceed, nelem, Q*Q*Q, nelem*Q*Q*Q,
608                                     bpOptions[bpChoice].qdatasize,
609                                     &Erestrictqdi);
610   CeedElemRestrictionCreateIdentity(ceed, imodeceed, nelem, Q*Q*Q, nelem*Q*Q*Q,
611                                     1, &Erestrictxi);
612   {
613     CeedScalar *xloc;
614     CeedInt shape[3] = {melem[0]+1, melem[1]+1, melem[2]+1}, len =
615                          shape[0]*shape[1]*shape[2];
616     xloc = malloc(len*ncompx*sizeof xloc[0]);
617     for (CeedInt i=0; i<shape[0]; i++) {
618       for (CeedInt j=0; j<shape[1]; j++) {
619         for (CeedInt k=0; k<shape[2]; k++) {
620           xloc[((i*shape[1]+j)*shape[2]+k) + 0*len] = 1.*(irank[0]*melem[0]+i) /
621               (p[0]*melem[0]);
622           xloc[((i*shape[1]+j)*shape[2]+k) + 1*len] = 1.*(irank[1]*melem[1]+j) /
623               (p[1]*melem[1]);
624           xloc[((i*shape[1]+j)*shape[2]+k) + 2*len] = 1.*(irank[2]*melem[2]+k) /
625               (p[2]*melem[2]);
626         }
627       }
628     }
629     CeedVectorCreate(ceed, len*ncompx, &xcoord);
630     CeedVectorSetArray(xcoord, CEED_MEM_HOST, CEED_OWN_POINTER, xloc);
631   }
632 
633   // Create the Qfunction that builds the operator quadrature data
634   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].setupgeo,
635                               bpOptions[bpChoice].setupgeofname, &qf_setupgeo);
636   CeedQFunctionAddInput(qf_setupgeo, "dx", ncompx*dim, CEED_EVAL_GRAD);
637   CeedQFunctionAddInput(qf_setupgeo, "weight", 1, CEED_EVAL_WEIGHT);
638   CeedQFunctionAddOutput(qf_setupgeo, "qdata", bpOptions[bpChoice].qdatasize,
639                          CEED_EVAL_NONE);
640 
641   // Create the Qfunction that sets up the RHS and true solution
642   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].setuprhs,
643                               bpOptions[bpChoice].setuprhsfname, &qf_setuprhs);
644   CeedQFunctionAddInput(qf_setuprhs, "x", ncompx, CEED_EVAL_INTERP);
645   CeedQFunctionAddInput(qf_setuprhs, "dx", ncompx*dim, CEED_EVAL_GRAD);
646   CeedQFunctionAddInput(qf_setuprhs, "weight", 1, CEED_EVAL_WEIGHT);
647   CeedQFunctionAddOutput(qf_setuprhs, "true_soln", ncompu, CEED_EVAL_NONE);
648   CeedQFunctionAddOutput(qf_setuprhs, "rhs", ncompu, CEED_EVAL_INTERP);
649 
650   // Set up PDE operator
651   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].apply,
652                               bpOptions[bpChoice].applyfname, &qf_apply);
653   // Add inputs and outputs
654   CeedInt inscale = bpOptions[bpChoice].inmode==CEED_EVAL_GRAD ? 3 : 1;
655   CeedInt outscale = bpOptions[bpChoice].outmode==CEED_EVAL_GRAD ? 3 : 1;
656   CeedQFunctionAddInput(qf_apply, "u", ncompu*inscale,
657                         bpOptions[bpChoice].inmode);
658   CeedQFunctionAddInput(qf_apply, "qdata", bpOptions[bpChoice].qdatasize,
659                         CEED_EVAL_NONE);
660   CeedQFunctionAddOutput(qf_apply, "v", ncompu*outscale,
661                          bpOptions[bpChoice].outmode);
662 
663   // Create the error qfunction
664   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].error,
665                               bpOptions[bpChoice].errorfname, &qf_error);
666   CeedQFunctionAddInput(qf_error, "u", ncompu, CEED_EVAL_INTERP);
667   CeedQFunctionAddInput(qf_error, "true_soln", ncompu, CEED_EVAL_NONE);
668   CeedQFunctionAddOutput(qf_error, "error", ncompu, CEED_EVAL_NONE);
669 
670   // Create the persistent vectors that will be needed in setup
671   CeedInt nqpts;
672   CeedBasisGetNumQuadraturePoints(basisu, &nqpts);
673   CeedVectorCreate(ceed, bpOptions[bpChoice].qdatasize*nelem*nqpts, &qdata);
674   CeedVectorCreate(ceed, nelem*nqpts*ncompu, &target);
675   CeedVectorCreate(ceed, lsize*ncompu, &rhsceed);
676 
677   // Create the operator that builds the quadrature data for the ceed operator
678   CeedOperatorCreate(ceed, qf_setupgeo, CEED_QFUNCTION_NONE,
679                      CEED_QFUNCTION_NONE, &op_setupgeo);
680   CeedOperatorSetField(op_setupgeo, "dx", Erestrictx, basisx,
681                        CEED_VECTOR_ACTIVE);
682   CeedOperatorSetField(op_setupgeo, "weight", Erestrictxi, basisx,
683                        CEED_VECTOR_NONE);
684   CeedOperatorSetField(op_setupgeo, "qdata", Erestrictqdi,
685                        CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE);
686 
687   // Create the operator that builds the RHS and true solution
688   CeedOperatorCreate(ceed, qf_setuprhs, CEED_QFUNCTION_NONE,
689                      CEED_QFUNCTION_NONE, &op_setuprhs);
690   CeedOperatorSetField(op_setuprhs, "x", Erestrictx, basisx,
691                        CEED_VECTOR_ACTIVE);
692   CeedOperatorSetField(op_setuprhs, "dx", Erestrictx, basisx,
693                        CEED_VECTOR_ACTIVE);
694   CeedOperatorSetField(op_setuprhs, "weight", Erestrictxi, basisx,
695                        CEED_VECTOR_NONE);
696   CeedOperatorSetField(op_setuprhs, "true_soln", Erestrictui,
                       CEED_BASIS_COLLOCATED, target);
  CeedOperatorSetField(op_setuprhs, "rhs", Erestrictu, basisu,
                       CEED_VECTOR_ACTIVE);

  // Create the mass or diff operator
  // "u" and "v" are the active input/output fields; "qdata" carries the
  // geometric factors that op_setupgeo (applied below) writes at quadrature
  // points, so the same op_apply works for both the mass and diff QFunctions.
  CeedOperatorCreate(ceed, qf_apply, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
                     &op_apply);
  CeedOperatorSetField(op_apply, "u", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_apply, "qdata", Erestrictqdi, CEED_BASIS_COLLOCATED,
                       qdata);
  CeedOperatorSetField(op_apply, "v", Erestrictu, basisu, CEED_VECTOR_ACTIVE);

  // Create the error operator
  // Compares the computed solution "u" against the reference values in
  // `target` (filled by op_setuprhs above) at quadrature points.
  CeedOperatorCreate(ceed, qf_error, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
                     &op_error);
  CeedOperatorSetField(op_error, "u", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_error, "true_soln", Erestrictui,
                       CEED_BASIS_COLLOCATED, target);
  CeedOperatorSetField(op_error, "error", Erestrictui, CEED_BASIS_COLLOCATED,
                       CEED_VECTOR_ACTIVE);

  // Set up Mat
  // The shell-matrix context holds everything MatMult_Mass/MatMult_Diff need:
  // the communicator, scatters, local work vectors, and the libCEED operator.
  ierr = PetscMalloc1(1, &user); CHKERRQ(ierr);
  user->comm = comm;
  user->ltog = ltog;
  // ltog0/gtogD are stored only for the non-mass problems (BP3-BP6);
  // presumably the scatters that handle Dirichlet-constrained dofs in
  // MatMult_Diff -- confirm against the MatMult implementations above.
  if (bpChoice != CEED_BP1 && bpChoice != CEED_BP2) {
    user->ltog0 = ltog0;
    user->gtogD = gtogD;
  }
  user->Xloc = Xloc;
  ierr = VecDuplicate(Xloc, &user->Yloc); CHKERRQ(ierr);
  CeedVectorCreate(ceed, lsize*ncompu, &user->xceed);
  CeedVectorCreate(ceed, lsize*ncompu, &user->yceed);
  user->op = op_apply;
  user->qdata = qdata;
  user->ceed = ceed;

  // Local (owned) size is the per-rank node count times the number of
  // components; global sizes are left to PETSc (PETSC_DECIDE).
  ierr = MatCreateShell(comm, mnodes[0]*mnodes[1]*mnodes[2]*ncompu,
                        mnodes[0]*mnodes[1]*mnodes[2]*ncompu,
                        PETSC_DECIDE, PETSC_DECIDE, user, &mat); CHKERRQ(ierr);
  if (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) {
    ierr = MatShellSetOperation(mat, MATOP_MULT, (void(*)(void))MatMult_Mass);
    CHKERRQ(ierr);
  } else {
    ierr = MatShellSetOperation(mat, MATOP_MULT, (void(*)(void))MatMult_Diff);
    CHKERRQ(ierr);
  }
  ierr = MatCreateVecs(mat, &rhs, NULL); CHKERRQ(ierr);

  // Get RHS vector
  // rhsceed borrows rhsloc's host array (CEED_USE_POINTER), so the RHS
  // operator below writes directly into the PETSc vector's storage.
  ierr = VecDuplicate(Xloc, &rhsloc); CHKERRQ(ierr);
  ierr = VecZeroEntries(rhsloc); CHKERRQ(ierr);
  ierr = VecGetArray(rhsloc, &r); CHKERRQ(ierr);
  CeedVectorSetArray(rhsceed, CEED_MEM_HOST, CEED_USE_POINTER, r);

  // Setup qdata, rhs, and target
  CeedOperatorApply(op_setupgeo, xcoord, qdata, CEED_REQUEST_IMMEDIATE);
  CeedOperatorApply(op_setuprhs, xcoord, rhsceed, CEED_REQUEST_IMMEDIATE);
  // Make sure any backend-side result is visible in the borrowed host array
  // before PETSc reads it back via VecRestoreArray.
  ierr = CeedVectorSyncArray(rhsceed, CEED_MEM_HOST); CHKERRQ(ierr);
  CeedVectorDestroy(&xcoord);

  // Gather RHS
  // ADD_VALUES accumulates the contributions of nodes shared between ranks
  // into the global vector.
  ierr = VecRestoreArray(rhsloc, &r); CHKERRQ(ierr);
  ierr = VecZeroEntries(rhs); CHKERRQ(ierr);
  ierr = VecScatterBegin(ltog, rhsloc, rhs, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(ltog, rhsloc, rhs, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  CeedVectorDestroy(&rhsceed);

  // Krylov solver: CG with the natural norm. Row-sum (lumped) Jacobi is used
  // for the mass-matrix problems (BP1/BP2); the diff problems run
  // unpreconditioned. Options below can be overridden by KSPSetFromOptions.
  ierr = KSPCreate(comm, &ksp); CHKERRQ(ierr);
  {
    PC pc;
    ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
    if (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) {
      ierr = PCSetType(pc, PCJACOBI); CHKERRQ(ierr);
      ierr = PCJacobiSetType(pc, PC_JACOBI_ROWSUM); CHKERRQ(ierr);
    } else {
      ierr = PCSetType(pc, PCNONE); CHKERRQ(ierr);
    }
    ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr);
    ierr = KSPSetNormType(ksp, KSP_NORM_NATURAL); CHKERRQ(ierr);
    ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT,
                            PETSC_DEFAULT); CHKERRQ(ierr);
  }
  ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp, mat, mat); CHKERRQ(ierr);
  // First run, if benchmarking
  // Warm-up solve capped at 1 iteration; its (min-over-ranks) wall time
  // selects maxits for the timed solve so slow configurations iterate less.
  if (benchmark_mode) {
    ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1);
    CHKERRQ(ierr);
    my_rt_start = MPI_Wtime();
    ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
    my_rt = MPI_Wtime() - my_rt_start;
    ierr = MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm);
    CHKERRQ(ierr);
    // Set maxits based on first iteration timing
    if (my_rt > 0.02) {
      ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 5);
      CHKERRQ(ierr);
    } else {
      ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20);
      CHKERRQ(ierr);
    }
  }
  // Timed solve
  ierr = PetscBarrier((PetscObject)ksp); CHKERRQ(ierr);
  my_rt_start = MPI_Wtime();
  ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
  my_rt = MPI_Wtime() - my_rt_start;
  {
    KSPType ksptype;
    KSPConvergedReason reason;
    PetscReal rnorm;
    PetscInt its;
    ierr = KSPGetType(ksp, &ksptype); CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr);
    ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
    ierr = KSPGetResidualNorm(ksp, &rnorm); CHKERRQ(ierr);
    // In test mode, only print on failure (divergence or loose residual).
    // NOTE(review): when benchmark_mode && !test_mode, both this branch and
    // the one below print the "Performance" section, so it appears twice --
    // confirm that is intended.
    if (!test_mode || reason < 0 || rnorm > 1e-8) {
      ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm);
      CHKERRQ(ierr);
      ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm);
      CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  KSP:\n"
                         "    KSP Type                           : %s\n"
                         "    KSP Convergence                    : %s\n"
                         "    Total KSP Iterations               : %D\n"
                         "    Final rnorm                        : %e\n",
                         ksptype, KSPConvergedReasons[reason], its,
                         (double)rnorm); CHKERRQ(ierr);
      // 1e-6*gsize*its/time = millions of DoF-iterations per second.
      ierr = PetscPrintf(comm,
                         "  Performance:\n"
                         "    CG Solve Time                      : %g (%g) sec\n"
                         "    DoFs/Sec in CG                     : %g (%g) million\n",
                         rt_max, rt_min, 1e-6*gsize*its/rt_max,
                         1e-6*gsize*its/rt_min); CHKERRQ(ierr);
    }
    if (benchmark_mode && (!test_mode)) {
      ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm);
      CHKERRQ(ierr);
      ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm);
      CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  Performance:\n"
                         "    CG Solve Time                      : %g (%g) sec\n"
                         "    DoFs/Sec in CG                     : %g (%g) million\n",
                         rt_max, rt_min, 1e-6*gsize*its/rt_max,
                         1e-6*gsize*its/rt_min); CHKERRQ(ierr);
    }
  }

  // Check the discrete maximum error against a per-problem tolerance;
  // the diff problems (BP3-BP6) get a looser bound than the mass problems.
  // In test mode the error is printed only when it exceeds the tolerance.
  {
    PetscReal maxerror;
    ierr = ComputeErrorMax(user, op_error, X, target, &maxerror); CHKERRQ(ierr);
    PetscReal tol = (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) ? 5e-3 : 5e-2;
    if (!test_mode || maxerror > tol) {
      ierr = PetscPrintf(comm,
                         "    Pointwise Error (max)              : %e\n",
                         (double)maxerror); CHKERRQ(ierr);
    }
  }

  // Optionally dump the solution for visualization (-write_solution).
  if (write_solution) {
    PetscViewer vtkviewersoln;

    ierr = PetscViewerCreate(comm, &vtkviewersoln); CHKERRQ(ierr);
    ierr = PetscViewerSetType(vtkviewersoln, PETSCVIEWERVTK); CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(vtkviewersoln, "solution.vtk"); CHKERRQ(ierr);
    ierr = VecView(X, vtkviewersoln); CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&vtkviewersoln); CHKERRQ(ierr);
  }

  // Cleanup: release PETSc objects first, then libCEED objects, then the
  // Ceed context itself and the shell-matrix user struct.
  ierr = VecDestroy(&rhs); CHKERRQ(ierr);
  ierr = VecDestroy(&rhsloc); CHKERRQ(ierr);
  ierr = VecDestroy(&X); CHKERRQ(ierr);
  ierr = VecDestroy(&user->Xloc); CHKERRQ(ierr);
  ierr = VecDestroy(&user->Yloc); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ltog); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ltog0); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&gtogD); CHKERRQ(ierr);
  ierr = MatDestroy(&mat); CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp); CHKERRQ(ierr);

  CeedVectorDestroy(&user->xceed);
  CeedVectorDestroy(&user->yceed);
  CeedVectorDestroy(&user->qdata);
  CeedVectorDestroy(&target);
  CeedOperatorDestroy(&op_setupgeo);
  CeedOperatorDestroy(&op_setuprhs);
  CeedOperatorDestroy(&op_apply);
  CeedOperatorDestroy(&op_error);
  CeedElemRestrictionDestroy(&Erestrictu);
  CeedElemRestrictionDestroy(&Erestrictx);
  CeedElemRestrictionDestroy(&Erestrictui);
  CeedElemRestrictionDestroy(&Erestrictxi);
  CeedElemRestrictionDestroy(&Erestrictqdi);
  CeedQFunctionDestroy(&qf_setupgeo);
  CeedQFunctionDestroy(&qf_setuprhs);
  CeedQFunctionDestroy(&qf_apply);
  CeedQFunctionDestroy(&qf_error);
  CeedBasisDestroy(&basisu);
  CeedBasisDestroy(&basisx);
  CeedDestroy(&ceed);
  ierr = PetscFree(user); CHKERRQ(ierr);
  return PetscFinalize();
}
905