xref: /libCEED/examples/petsc/bpsraw.c (revision 4092d0ee9dee1dc94927b92ec4a4f5b5b7bb02dc)
1 // Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
2 // the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
3 // reserved. See files LICENSE and NOTICE for details.
4 //
5 // This file is part of CEED, a collection of benchmarks, miniapps, software
6 // libraries and APIs for efficient high-order finite element and spectral
7 // element discretizations for exascale applications. For more information and
8 // source code availability see http://github.com/ceed.
9 //
10 // The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
11 // a collaborative effort of two U.S. Department of Energy organizations (Office
12 // of Science and the National Nuclear Security Administration) responsible for
13 // the planning and preparation of a capable exascale ecosystem, including
14 // software, applications, hardware, advanced system engineering and early
15 // testbed platforms, in support of the nation's exascale computing imperative.
16 
17 //                        libCEED + PETSc Example: CEED BPs
18 //
19 // This example demonstrates a simple usage of libCEED with PETSc to solve the
20 // CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
21 //
22 // The code is intentionally "raw", using only low-level communication
23 // primitives.
24 //
25 // Build with:
26 //
27 //     make bpsraw [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
28 //
29 // Sample runs:
30 //
31 //     ./bpsraw -problem bp1
32 //     ./bpsraw -problem bp2 -ceed /cpu/self
33 //     ./bpsraw -problem bp3 -ceed /gpu/occa
34 //     ./bpsraw -problem bp4 -ceed /cpu/occa
35 //     ./bpsraw -problem bp5 -ceed /omp/occa
36 //     ./bpsraw -problem bp6 -ceed /ocl/occa
37 //
38 //TESTARGS -ceed {ceed_resource} -test -problem bp2 -degree 5 -qextra 5
39 
40 /// @file
41 /// CEED BPs example using PETSc
42 /// See bps.c for an implementation using DMPlex unstructured grids.
43 const char help[] = "Solve CEED BPs using PETSc\n";
44 
45 #include <stdbool.h>
46 #include <string.h>
47 #include <petscksp.h>
48 #include <ceed.h>
49 #include "qfunctions/bps/common.h"
50 #include "qfunctions/bps/bp1.h"
51 #include "qfunctions/bps/bp2.h"
52 #include "qfunctions/bps/bp3.h"
53 #include "qfunctions/bps/bp4.h"
54 
55 static void Split3(PetscInt size, PetscInt m[3], bool reverse) {
56   for (PetscInt d=0,sizeleft=size; d<3; d++) {
57     PetscInt try = (PetscInt)PetscCeilReal(PetscPowReal(sizeleft, 1./(3 - d)));
58     while (try * (sizeleft / try) != sizeleft) try++;
59     m[reverse ? 2-d : d] = try;
60     sizeleft /= try;
61   }
62 }
63 
64 static PetscInt Max3(const PetscInt a[3]) {
65   return PetscMax(a[0], PetscMax(a[1], a[2]));
66 }
67 static PetscInt Min3(const PetscInt a[3]) {
68   return PetscMin(a[0], PetscMin(a[1], a[2]));
69 }
70 static void GlobalNodes(const PetscInt p[3], const PetscInt irank[3],
71                         PetscInt degree, const PetscInt melem[3],
72                         PetscInt mnodes[3]) {
73   for (int d=0; d<3; d++)
74     mnodes[d] = degree*melem[d] + (irank[d] == p[d]-1);
75 }
76 static PetscInt GlobalStart(const PetscInt p[3], const PetscInt irank[3],
77                             PetscInt degree, const PetscInt melem[3]) {
78   PetscInt start = 0;
79   // Dumb brute-force is easier to read
80   for (PetscInt i=0; i<p[0]; i++) {
81     for (PetscInt j=0; j<p[1]; j++) {
82       for (PetscInt k=0; k<p[2]; k++) {
83         PetscInt mnodes[3], ijkrank[] = {i,j,k};
84         if (i == irank[0] && j == irank[1] && k == irank[2]) return start;
85         GlobalNodes(p, ijkrank, degree, melem, mnodes);
86         start += mnodes[0] * mnodes[1] * mnodes[2];
87       }
88     }
89   }
90   return -1;
91 }
92 static int CreateRestriction(Ceed ceed, CeedInterlaceMode imode,
93                              const CeedInt melem[3], CeedInt P, CeedInt ncomp,
94                              CeedElemRestriction *Erestrict) {
95   const PetscInt nelem = melem[0]*melem[1]*melem[2];
96   PetscInt mnodes[3], *idx, *idxp;
97 
98   // Get indicies
99   for (int d=0; d<3; d++) mnodes[d] = melem[d]*(P-1) + 1;
100   idxp = idx = malloc(nelem*P*P*P*sizeof idx[0]);
101   for (CeedInt i=0; i<melem[0]; i++)
102     for (CeedInt j=0; j<melem[1]; j++)
103       for (CeedInt k=0; k<melem[2]; k++,idxp += P*P*P)
104         for (CeedInt ii=0; ii<P; ii++)
105           for (CeedInt jj=0; jj<P; jj++)
106             for (CeedInt kk=0; kk<P; kk++) {
107               if (0) { // This is the C-style (i,j,k) ordering that I prefer
108                 idxp[(ii*P+jj)*P+kk] = (((i*(P-1)+ii)*mnodes[1]
109                                          + (j*(P-1)+jj))*mnodes[2]
110                                         + (k*(P-1)+kk));
111               } else { // (k,j,i) ordering for consistency with MFEM example
112                 idxp[ii+P*(jj+P*kk)] = (((i*(P-1)+ii)*mnodes[1]
113                                          + (j*(P-1)+jj))*mnodes[2]
114                                         + (k*(P-1)+kk));
115               }
116             }
117 
118   // Setup CEED restriction
119   CeedElemRestrictionCreate(ceed, imode, nelem, P*P*P,
120                             mnodes[0]*mnodes[1]*mnodes[2], ncomp,
121                             CEED_MEM_HOST, CEED_OWN_POINTER, idx, Erestrict);
122 
123   PetscFunctionReturn(0);
124 }
125 
// Data for PETSc: context passed to the MatShell MatMult callbacks; bundles
// the communication scatters, local work vectors, and the libCEED operator.
typedef struct User_ *User;
struct User_ {
  MPI_Comm comm;
  VecScatter ltog;              // Scatter for all entries
  VecScatter ltog0;             // Skip Dirichlet values
  VecScatter gtogD;             // global-to-global; only Dirichlet values
  Vec Xloc, Yloc;               // Per-process local work vectors
  CeedVector xceed, yceed;      // CEED wrappers around Xloc/Yloc host arrays
  CeedOperator op;              // Operator applied in MatMult
  CeedVector qdata;             // Quadrature data referenced by op
  Ceed ceed;
};
139 
// BP Options: which CEED benchmark problem to solve
typedef enum {
  CEED_BP1 = 0, CEED_BP2 = 1, CEED_BP3 = 2,
  CEED_BP4 = 3, CEED_BP5 = 4, CEED_BP6 = 5
} bpType;
// Strings for -problem parsing; PetscOptionsEnum expects the value names
// followed by the enum type name, an option prefix, and a NULL terminator.
static const char *const bpTypes[] = {"bp1","bp2","bp3","bp4","bp5","bp6",
                                      "bpType","CEED_BP",0
                                     };
148 
// BP specific data: everything that varies between the benchmark problems
typedef struct {
  CeedInt ncompu, qdatasize, qextra;  // solution components, qdata per qpt, extra quadrature points
  CeedQFunctionUser setupgeo, setuprhs, apply, error;  // QFunction callbacks
  const char *setupgeofname, *setuprhsfname, *applyfname, *errorfname;  // source locations for JIT
  CeedEvalMode inmode, outmode;  // INTERP for mass BPs, GRAD for diffusion BPs
  CeedQuadMode qmode;            // GAUSS, or GAUSS_LOBATTO for collocated BP5/BP6
} bpData;
157 
158 bpData bpOptions[6] = {
159   [CEED_BP1] = {
160     .ncompu = 1,
161     .qdatasize = 1,
162     .qextra = 1,
163     .setupgeo = SetupMassGeo,
164     .setuprhs = SetupMassRhs,
165     .apply = Mass,
166     .error = Error,
167     .setupgeofname = SetupMassGeo_loc,
168     .setuprhsfname = SetupMassRhs_loc,
169     .applyfname = Mass_loc,
170     .errorfname = Error_loc,
171     .inmode = CEED_EVAL_INTERP,
172     .outmode = CEED_EVAL_INTERP,
173     .qmode = CEED_GAUSS
174   },
175   [CEED_BP2] = {
176     .ncompu = 3,
177     .qdatasize = 1,
178     .qextra = 1,
179     .setupgeo = SetupMassGeo,
180     .setuprhs = SetupMassRhs3,
181     .apply = Mass3,
182     .error = Error3,
183     .setupgeofname = SetupMassGeo_loc,
184     .setuprhsfname = SetupMassRhs3_loc,
185     .applyfname = Mass3_loc,
186     .errorfname = Error3_loc,
187     .inmode = CEED_EVAL_INTERP,
188     .outmode = CEED_EVAL_INTERP,
189     .qmode = CEED_GAUSS
190   },
191   [CEED_BP3] = {
192     .ncompu = 1,
193     .qdatasize = 6,
194     .qextra = 1,
195     .setupgeo = SetupDiffGeo,
196     .setuprhs = SetupDiffRhs,
197     .apply = Diff,
198     .error = Error,
199     .setupgeofname = SetupDiffGeo_loc,
200     .setuprhsfname = SetupDiffRhs_loc,
201     .applyfname = Diff_loc,
202     .errorfname = Error_loc,
203     .inmode = CEED_EVAL_GRAD,
204     .outmode = CEED_EVAL_GRAD,
205     .qmode = CEED_GAUSS
206   },
207   [CEED_BP4] = {
208     .ncompu = 3,
209     .qdatasize = 6,
210     .qextra = 1,
211     .setupgeo = SetupDiffGeo,
212     .setuprhs = SetupDiffRhs3,
213     .apply = Diff3,
214     .error = Error3,
215     .setupgeofname = SetupDiffGeo_loc,
216     .setuprhsfname = SetupDiffRhs3_loc,
217     .applyfname = Diff_loc,
218     .errorfname = Error3_loc,
219     .inmode = CEED_EVAL_GRAD,
220     .outmode = CEED_EVAL_GRAD,
221     .qmode = CEED_GAUSS
222   },
223   [CEED_BP5] = {
224     .ncompu = 1,
225     .qdatasize = 6,
226     .qextra = 0,
227     .setupgeo = SetupDiffGeo,
228     .setuprhs = SetupDiffRhs,
229     .apply = Diff,
230     .error = Error,
231     .setupgeofname = SetupDiffGeo_loc,
232     .setuprhsfname = SetupDiffRhs_loc,
233     .applyfname = Diff_loc,
234     .errorfname = Error_loc,
235     .inmode = CEED_EVAL_GRAD,
236     .outmode = CEED_EVAL_GRAD,
237     .qmode = CEED_GAUSS_LOBATTO
238   },
239   [CEED_BP6] = {
240     .ncompu = 3,
241     .qdatasize = 6,
242     .qextra = 0,
243     .setupgeo = SetupDiffGeo,
244     .setuprhs = SetupDiffRhs3,
245     .apply = Diff3,
246     .error = Error3,
247     .setupgeofname = SetupDiffGeo_loc,
248     .setuprhsfname = SetupDiffRhs3_loc,
249     .applyfname = Diff_loc,
250     .errorfname = Error3_loc,
251     .inmode = CEED_EVAL_GRAD,
252     .outmode = CEED_EVAL_GRAD,
253     .qmode = CEED_GAUSS_LOBATTO
254   }
255 };
256 
// This function uses libCEED to compute the action of the mass matrix:
// scatter global X to the local vector, apply the CEED operator on the local
// arrays, then sum-scatter the local result back into global Y.
static PetscErrorCode MatMult_Mass(Mat A, Vec X, Vec Y) {
  PetscErrorCode ierr;
  User user;
  PetscScalar *x, *y;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A, &user); CHKERRQ(ierr);
  // Global-to-local (reverse scatter through the full local-to-global map)
  ierr = VecScatterBegin(user->ltog, X, user->Xloc, INSERT_VALUES,
                         SCATTER_REVERSE); CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog, X, user->Xloc, INSERT_VALUES, SCATTER_REVERSE);
  CHKERRQ(ierr);
  ierr = VecZeroEntries(user->Yloc); CHKERRQ(ierr);

  // Wrap the local PETSc arrays in the CEED vectors (no copy)
  ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecGetArray(user->Yloc, &y); CHKERRQ(ierr);
  CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
  CeedVectorSetArray(user->yceed, CEED_MEM_HOST, CEED_USE_POINTER, y);

  // Apply the CEED operator and make sure the result is on the host
  CeedOperatorApply(user->op, user->xceed, user->yceed,
                    CEED_REQUEST_IMMEDIATE);
  ierr = CeedVectorSyncArray(user->yceed, CEED_MEM_HOST); CHKERRQ(ierr);

  ierr = VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecRestoreArray(user->Yloc, &y); CHKERRQ(ierr);

  // Local-to-global: accumulate contributions from all processes into Y
  if (Y) {
    ierr = VecZeroEntries(Y); CHKERRQ(ierr);
    ierr = VecScatterBegin(user->ltog, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
    CHKERRQ(ierr);
    ierr = VecScatterEnd(user->ltog, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
    CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
292 
// This function uses libCEED to compute the action of the Laplacian with
// Dirichlet boundary conditions: interior values go through the CEED operator
// via the ltog0 scatter (which skips Dirichlet nodes), while Dirichlet values
// are copied from X to Y unchanged via the gtogD scatter (identity on the
// boundary).
static PetscErrorCode MatMult_Diff(Mat A, Vec X, Vec Y) {
  PetscErrorCode ierr;
  User user;
  PetscScalar *x, *y;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A, &user); CHKERRQ(ierr);

  // Global-to-local (interior entries only; boundary entries of Xloc keep
  // their previous values, zeroed at setup)
  ierr = VecScatterBegin(user->ltog0, X, user->Xloc, INSERT_VALUES,
                         SCATTER_REVERSE); CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog0, X, user->Xloc, INSERT_VALUES,
                       SCATTER_REVERSE);
  CHKERRQ(ierr);
  ierr = VecZeroEntries(user->Yloc); CHKERRQ(ierr);

  // Setup CEED vectors (wrap the local arrays, no copy)
  ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecGetArray(user->Yloc, &y); CHKERRQ(ierr);
  CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
  CeedVectorSetArray(user->yceed, CEED_MEM_HOST, CEED_USE_POINTER, y);

  // Apply CEED operator
  CeedOperatorApply(user->op, user->xceed, user->yceed,
                    CEED_REQUEST_IMMEDIATE);
  ierr = CeedVectorSyncArray(user->yceed, CEED_MEM_HOST); CHKERRQ(ierr);

  // Restore PETSc vectors
  ierr = VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
  ierr = VecRestoreArray(user->Yloc, &y); CHKERRQ(ierr);

  // Local-to-global: first copy the Dirichlet entries of X straight into Y,
  // then accumulate the operator result on the interior entries
  ierr = VecZeroEntries(Y); CHKERRQ(ierr);
  ierr = VecScatterBegin(user->gtogD, X, Y, INSERT_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(user->gtogD, X, Y, INSERT_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterBegin(user->ltog0, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(user->ltog0, user->Yloc, Y, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
339 
340 // This function calculates the error in the final solution
341 static PetscErrorCode ComputeErrorMax(User user, CeedOperator op_error, Vec X,
342                                       CeedVector target, PetscReal *maxerror) {
343   PetscErrorCode ierr;
344   PetscScalar *x;
345   CeedVector collocated_error;
346   CeedInt length;
347 
348   PetscFunctionBeginUser;
349   CeedVectorGetLength(target, &length);
350   CeedVectorCreate(user->ceed, length, &collocated_error);
351 
352   // Global-to-local
353   ierr = VecScatterBegin(user->ltog, X, user->Xloc, INSERT_VALUES,
354                          SCATTER_REVERSE); CHKERRQ(ierr);
355   ierr = VecScatterEnd(user->ltog, X, user->Xloc, INSERT_VALUES, SCATTER_REVERSE);
356   CHKERRQ(ierr);
357 
358   // Setup CEED vector
359   ierr = VecGetArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
360   CeedVectorSetArray(user->xceed, CEED_MEM_HOST, CEED_USE_POINTER, x);
361 
362   // Apply CEED operator
363   CeedOperatorApply(op_error, user->xceed, collocated_error,
364                     CEED_REQUEST_IMMEDIATE);
365 
366   // Restore PETSc vector
367   VecRestoreArrayRead(user->Xloc, (const PetscScalar **)&x); CHKERRQ(ierr);
368 
369   // Reduce max error
370   *maxerror = 0;
371   const CeedScalar *e;
372   CeedVectorGetArrayRead(collocated_error, CEED_MEM_HOST, &e);
373   for (CeedInt i=0; i<length; i++) {
374     *maxerror = PetscMax(*maxerror, PetscAbsScalar(e[i]));
375   }
376   CeedVectorRestoreArrayRead(collocated_error, &e);
377   ierr = MPI_Allreduce(MPI_IN_PLACE, maxerror,
378                        1, MPIU_REAL, MPIU_MAX, user->comm); CHKERRQ(ierr);
379 
380   // Cleanup
381   CeedVectorDestroy(&collocated_error);
382 
383   PetscFunctionReturn(0);
384 }
385 
386 int main(int argc, char **argv) {
387   PetscInt ierr;
388   MPI_Comm comm;
389   char ceedresource[PETSC_MAX_PATH_LEN] = "/cpu/self";
390   double my_rt_start, my_rt, rt_min, rt_max;
391   PetscInt degree, qextra, localnodes, localelem, melem[3], mnodes[3], p[3],
392            irank[3], lnodes[3], lsize, ncompu = 1;
393   PetscScalar *r;
394   PetscBool test_mode, benchmark_mode, write_solution;
395   PetscMPIInt size, rank;
396   Vec X, Xloc, rhs, rhsloc;
397   Mat mat;
398   KSP ksp;
399   VecScatter ltog, ltog0, gtogD;
400   User user;
401   Ceed ceed;
402   CeedBasis basisx, basisu;
403   CeedElemRestriction Erestrictx, Erestrictu, Erestrictxi, Erestrictui,
404                       Erestrictqdi;
405   CeedQFunction qf_setupgeo, qf_setuprhs, qf_apply, qf_error;
406   CeedOperator op_setupgeo, op_setuprhs, op_apply, op_error;
407   CeedVector xcoord, qdata, rhsceed, target;
408   CeedInt P, Q;
409   const CeedInt dim = 3, ncompx = 3;
410   bpType bpChoice;
411 
412   ierr = PetscInitialize(&argc, &argv, NULL, help);
413   if (ierr) return ierr;
414   comm = PETSC_COMM_WORLD;
415 
416   // Read command line options
417   ierr = PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL); CHKERRQ(ierr);
418   bpChoice = CEED_BP1;
419   ierr = PetscOptionsEnum("-problem",
420                           "CEED benchmark problem to solve", NULL,
421                           bpTypes, (PetscEnum)bpChoice, (PetscEnum *)&bpChoice,
422                           NULL); CHKERRQ(ierr);
423   ncompu = bpOptions[bpChoice].ncompu;
424   test_mode = PETSC_FALSE;
425   ierr = PetscOptionsBool("-test",
426                           "Testing mode (do not print unless error is large)",
427                           NULL, test_mode, &test_mode, NULL); CHKERRQ(ierr);
428   benchmark_mode = PETSC_FALSE;
429   ierr = PetscOptionsBool("-benchmark",
430                           "Benchmarking mode (prints benchmark statistics)",
431                           NULL, benchmark_mode, &benchmark_mode, NULL);
432   CHKERRQ(ierr);
433   write_solution = PETSC_FALSE;
434   ierr = PetscOptionsBool("-write_solution",
435                           "Write solution for visualization",
436                           NULL, write_solution, &write_solution, NULL);
437   CHKERRQ(ierr);
438   degree = test_mode ? 3 : 1;
439   ierr = PetscOptionsInt("-degree", "Polynomial degree of tensor product basis",
440                          NULL, degree, &degree, NULL); CHKERRQ(ierr);
441   qextra = bpOptions[bpChoice].qextra;
442   ierr = PetscOptionsInt("-qextra", "Number of extra quadrature points",
443                          NULL, qextra, &qextra, NULL); CHKERRQ(ierr);
444   ierr = PetscOptionsString("-ceed", "CEED resource specifier",
445                             NULL, ceedresource, ceedresource,
446                             sizeof(ceedresource), NULL); CHKERRQ(ierr);
447   localnodes = 1000;
448   ierr = PetscOptionsInt("-local",
449                          "Target number of locally owned nodes per process",
450                          NULL, localnodes, &localnodes, NULL); CHKERRQ(ierr);
451   ierr = PetscOptionsEnd(); CHKERRQ(ierr);
452   P = degree + 1;
453   Q = P + qextra;
454 
455   // Determine size of process grid
456   ierr = MPI_Comm_size(comm, &size); CHKERRQ(ierr);
457   Split3(size, p, false);
458 
459   // Find a nicely composite number of elements no less than localnodes
460   for (localelem = PetscMax(1, localnodes / (degree*degree*degree)); ;
461        localelem++) {
462     Split3(localelem, melem, true);
463     if (Max3(melem) / Min3(melem) <= 2) break;
464   }
465 
466   // Find my location in the process grid
467   ierr = MPI_Comm_rank(comm, &rank); CHKERRQ(ierr);
468   for (int d=0,rankleft=rank; d<dim; d++) {
469     const int pstride[3] = {p[1] *p[2], p[2], 1};
470     irank[d] = rankleft / pstride[d];
471     rankleft -= irank[d] * pstride[d];
472   }
473 
474   GlobalNodes(p, irank, degree, melem, mnodes);
475 
476   // Setup global vector
477   ierr = VecCreate(comm, &X); CHKERRQ(ierr);
478   ierr = VecSetSizes(X, mnodes[0]*mnodes[1]*mnodes[2]*ncompu, PETSC_DECIDE);
479   CHKERRQ(ierr);
480   ierr = VecSetUp(X); CHKERRQ(ierr);
481 
482   // Set up libCEED
483   CeedInit(ceedresource, &ceed);
484 
485   // Print summary
486   CeedInt gsize;
487   ierr = VecGetSize(X, &gsize); CHKERRQ(ierr);
488   if (!test_mode) {
489     const char *usedresource;
490     CeedGetResource(ceed, &usedresource);
491     ierr = PetscPrintf(comm,
492                        "\n-- CEED Benchmark Problem %d -- libCEED + PETSc --\n"
493                        "  libCEED:\n"
494                        "    libCEED Backend                    : %s\n"
495                        "  Mesh:\n"
496                        "    Number of 1D Basis Nodes (p)       : %d\n"
497                        "    Number of 1D Quadrature Points (q) : %d\n"
498                        "    Global nodes                       : %D\n"
499                        "    Process Decomposition              : %D %D %D\n"
500                        "    Local Elements                     : %D = %D %D %D\n"
501                        "    Owned nodes                        : %D = %D %D %D\n"
502                        "    DoF per node                       : %D\n",
503                        bpChoice+1, usedresource, P, Q,  gsize/ncompu, p[0],
504                        p[1], p[2], localelem, melem[0], melem[1], melem[2],
505                        mnodes[0]*mnodes[1]*mnodes[2], mnodes[0], mnodes[1],
506                        mnodes[2], ncompu); CHKERRQ(ierr);
507   }
508 
509   {
510     lsize = 1;
511     for (int d=0; d<dim; d++) {
512       lnodes[d] = melem[d]*degree + 1;
513       lsize *= lnodes[d];
514     }
515     ierr = VecCreate(PETSC_COMM_SELF, &Xloc); CHKERRQ(ierr);
516     ierr = VecSetSizes(Xloc, lsize*ncompu, PETSC_DECIDE); CHKERRQ(ierr);
517     ierr = VecSetUp(Xloc); CHKERRQ(ierr);
518 
519     // Create local-to-global scatter
520     PetscInt *ltogind, *ltogind0, *locind, l0count;
521     IS ltogis, ltogis0, locis;
522     PetscInt gstart[2][2][2], gmnodes[2][2][2][dim];
523 
524     for (int i=0; i<2; i++) {
525       for (int j=0; j<2; j++) {
526         for (int k=0; k<2; k++) {
527           PetscInt ijkrank[3] = {irank[0]+i, irank[1]+j, irank[2]+k};
528           gstart[i][j][k] = GlobalStart(p, ijkrank, degree, melem);
529           GlobalNodes(p, ijkrank, degree, melem, gmnodes[i][j][k]);
530         }
531       }
532     }
533 
534     ierr = PetscMalloc1(lsize, &ltogind); CHKERRQ(ierr);
535     ierr = PetscMalloc1(lsize, &ltogind0); CHKERRQ(ierr);
536     ierr = PetscMalloc1(lsize, &locind); CHKERRQ(ierr);
537     l0count = 0;
538     for (PetscInt i=0,ir,ii; ir=i>=mnodes[0], ii=i-ir*mnodes[0], i<lnodes[0]; i++)
539       for (PetscInt j=0,jr,jj; jr=j>=mnodes[1], jj=j-jr*mnodes[1], j<lnodes[1]; j++)
540         for (PetscInt k=0,kr,kk; kr=k>=mnodes[2], kk=k-kr*mnodes[2], k<lnodes[2]; k++) {
541           PetscInt here = (i*lnodes[1]+j)*lnodes[2]+k;
542           ltogind[here] =
543             gstart[ir][jr][kr] + (ii*gmnodes[ir][jr][kr][1]+jj)*gmnodes[ir][jr][kr][2]+kk;
544           if ((irank[0] == 0 && i == 0)
545               || (irank[1] == 0 && j == 0)
546               || (irank[2] == 0 && k == 0)
547               || (irank[0]+1 == p[0] && i+1 == lnodes[0])
548               || (irank[1]+1 == p[1] && j+1 == lnodes[1])
549               || (irank[2]+1 == p[2] && k+1 == lnodes[2]))
550             continue;
551           ltogind0[l0count] = ltogind[here];
552           locind[l0count++] = here;
553         }
554     ierr = ISCreateBlock(comm, ncompu, lsize, ltogind, PETSC_OWN_POINTER,
555                          &ltogis); CHKERRQ(ierr);
556     ierr = VecScatterCreate(Xloc, NULL, X, ltogis, &ltog); CHKERRQ(ierr);
557     CHKERRQ(ierr);
558     ierr = ISCreateBlock(comm, ncompu, l0count, ltogind0, PETSC_OWN_POINTER,
559                          &ltogis0); CHKERRQ(ierr);
560     ierr = ISCreateBlock(comm, ncompu, l0count, locind, PETSC_OWN_POINTER,
561                          &locis); CHKERRQ(ierr);
562     ierr = VecScatterCreate(Xloc, locis, X, ltogis0, &ltog0); CHKERRQ(ierr);
563     {
564       // Create global-to-global scatter for Dirichlet values (everything not in
565       // ltogis0, which is the range of ltog0)
566       PetscInt xstart, xend, *indD, countD = 0;
567       IS isD;
568       const PetscScalar *x;
569       ierr = VecZeroEntries(Xloc); CHKERRQ(ierr);
570       ierr = VecSet(X, 1.0); CHKERRQ(ierr);
571       ierr = VecScatterBegin(ltog0, Xloc, X, INSERT_VALUES, SCATTER_FORWARD);
572       CHKERRQ(ierr);
573       ierr = VecScatterEnd(ltog0, Xloc, X, INSERT_VALUES, SCATTER_FORWARD);
574       CHKERRQ(ierr);
575       ierr = VecGetOwnershipRange(X, &xstart, &xend); CHKERRQ(ierr);
576       ierr = PetscMalloc1(xend-xstart, &indD); CHKERRQ(ierr);
577       ierr = VecGetArrayRead(X, &x); CHKERRQ(ierr);
578       for (PetscInt i=0; i<xend-xstart; i++) {
579         if (x[i] == 1.) indD[countD++] = xstart + i;
580       }
581       ierr = VecRestoreArrayRead(X, &x); CHKERRQ(ierr);
582       ierr = ISCreateGeneral(comm, countD, indD, PETSC_COPY_VALUES, &isD);
583       CHKERRQ(ierr);
584       ierr = PetscFree(indD); CHKERRQ(ierr);
585       ierr = VecScatterCreate(X, isD, X, isD, &gtogD); CHKERRQ(ierr);
586       ierr = ISDestroy(&isD); CHKERRQ(ierr);
587     }
588     ierr = ISDestroy(&ltogis); CHKERRQ(ierr);
589     ierr = ISDestroy(&ltogis0); CHKERRQ(ierr);
590     ierr = ISDestroy(&locis); CHKERRQ(ierr);
591   }
592 
593   // CEED bases
594   CeedBasisCreateTensorH1Lagrange(ceed, dim, ncompu, P, Q,
595                                   bpOptions[bpChoice].qmode, &basisu);
596   CeedBasisCreateTensorH1Lagrange(ceed, dim, ncompx, 2, Q,
597                                   bpOptions[bpChoice].qmode, &basisx);
598 
599   // CEED restrictions
600   CreateRestriction(ceed, CEED_INTERLACED, melem, P, ncompu, &Erestrictu);
601   CreateRestriction(ceed, CEED_NONINTERLACED, melem, 2, dim, &Erestrictx);
602   CeedInt nelem = melem[0]*melem[1]*melem[2];
603   CeedElemRestrictionCreateIdentity(ceed, CEED_NONINTERLACED, nelem, Q*Q*Q,
604                                     nelem*Q*Q*Q, ncompu, &Erestrictui);
605   CeedElemRestrictionCreateIdentity(ceed, CEED_NONINTERLACED, nelem, Q*Q*Q,
606                                     nelem*Q*Q*Q, bpOptions[bpChoice].qdatasize,
607                                     &Erestrictqdi);
608   CeedElemRestrictionCreateIdentity(ceed, CEED_NONINTERLACED, nelem, Q*Q*Q,
609                                     nelem*Q*Q*Q, 1, &Erestrictxi);
610   {
611     CeedScalar *xloc;
612     CeedInt shape[3] = {melem[0]+1, melem[1]+1, melem[2]+1}, len =
613                          shape[0]*shape[1]*shape[2];
614     xloc = malloc(len*ncompx*sizeof xloc[0]);
615     for (CeedInt i=0; i<shape[0]; i++) {
616       for (CeedInt j=0; j<shape[1]; j++) {
617         for (CeedInt k=0; k<shape[2]; k++) {
618           xloc[((i*shape[1]+j)*shape[2]+k) + 0*len] = 1.*(irank[0]*melem[0]+i) /
619               (p[0]*melem[0]);
620           xloc[((i*shape[1]+j)*shape[2]+k) + 1*len] = 1.*(irank[1]*melem[1]+j) /
621               (p[1]*melem[1]);
622           xloc[((i*shape[1]+j)*shape[2]+k) + 2*len] = 1.*(irank[2]*melem[2]+k) /
623               (p[2]*melem[2]);
624         }
625       }
626     }
627     CeedVectorCreate(ceed, len*ncompx, &xcoord);
628     CeedVectorSetArray(xcoord, CEED_MEM_HOST, CEED_OWN_POINTER, xloc);
629   }
630 
631   // Create the Qfunction that builds the operator quadrature data
632   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].setupgeo,
633                               bpOptions[bpChoice].setupgeofname, &qf_setupgeo);
634   CeedQFunctionAddInput(qf_setupgeo, "dx", ncompx*dim, CEED_EVAL_GRAD);
635   CeedQFunctionAddInput(qf_setupgeo, "weight", 1, CEED_EVAL_WEIGHT);
636   CeedQFunctionAddOutput(qf_setupgeo, "qdata", bpOptions[bpChoice].qdatasize,
637                          CEED_EVAL_NONE);
638 
639   // Create the Qfunction that sets up the RHS and true solution
640   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].setuprhs,
641                               bpOptions[bpChoice].setuprhsfname, &qf_setuprhs);
642   CeedQFunctionAddInput(qf_setuprhs, "x", ncompx, CEED_EVAL_INTERP);
643   CeedQFunctionAddInput(qf_setuprhs, "dx", ncompx*dim, CEED_EVAL_GRAD);
644   CeedQFunctionAddInput(qf_setuprhs, "weight", 1, CEED_EVAL_WEIGHT);
645   CeedQFunctionAddOutput(qf_setuprhs, "true_soln", ncompu, CEED_EVAL_NONE);
646   CeedQFunctionAddOutput(qf_setuprhs, "rhs", ncompu, CEED_EVAL_INTERP);
647 
648   // Set up PDE operator
649   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].apply,
650                               bpOptions[bpChoice].applyfname, &qf_apply);
651   // Add inputs and outputs
652   CeedInt inscale = bpOptions[bpChoice].inmode==CEED_EVAL_GRAD ? 3 : 1;
653   CeedInt outscale = bpOptions[bpChoice].outmode==CEED_EVAL_GRAD ? 3 : 1;
654   CeedQFunctionAddInput(qf_apply, "u", ncompu*inscale,
655                         bpOptions[bpChoice].inmode);
656   CeedQFunctionAddInput(qf_apply, "qdata", bpOptions[bpChoice].qdatasize,
657                         CEED_EVAL_NONE);
658   CeedQFunctionAddOutput(qf_apply, "v", ncompu*outscale,
659                          bpOptions[bpChoice].outmode);
660 
661   // Create the error qfunction
662   CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpChoice].error,
663                               bpOptions[bpChoice].errorfname, &qf_error);
664   CeedQFunctionAddInput(qf_error, "u", ncompu, CEED_EVAL_INTERP);
665   CeedQFunctionAddInput(qf_error, "true_soln", ncompu, CEED_EVAL_NONE);
666   CeedQFunctionAddOutput(qf_error, "error", ncompu, CEED_EVAL_NONE);
667 
668   // Create the persistent vectors that will be needed in setup
669   CeedInt nqpts;
670   CeedBasisGetNumQuadraturePoints(basisu, &nqpts);
671   CeedVectorCreate(ceed, bpOptions[bpChoice].qdatasize*nelem*nqpts, &qdata);
672   CeedVectorCreate(ceed, nelem*nqpts*ncompu, &target);
673   CeedVectorCreate(ceed, lsize*ncompu, &rhsceed);
674 
675   // Create the operator that builds the quadrature data for the ceed operator
676   CeedOperatorCreate(ceed, qf_setupgeo, CEED_QFUNCTION_NONE,
677                      CEED_QFUNCTION_NONE, &op_setupgeo);
678   CeedOperatorSetField(op_setupgeo, "dx", Erestrictx, basisx,
679                        CEED_VECTOR_ACTIVE);
680   CeedOperatorSetField(op_setupgeo, "weight", Erestrictxi, basisx,
681                        CEED_VECTOR_NONE);
682   CeedOperatorSetField(op_setupgeo, "qdata", Erestrictqdi,
683                        CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE);
684 
685   // Create the operator that builds the RHS and true solution
686   CeedOperatorCreate(ceed, qf_setuprhs, CEED_QFUNCTION_NONE,
687                      CEED_QFUNCTION_NONE, &op_setuprhs);
688   CeedOperatorSetField(op_setuprhs, "x", Erestrictx, basisx,
689                        CEED_VECTOR_ACTIVE);
690   CeedOperatorSetField(op_setuprhs, "dx", Erestrictx, basisx,
691                        CEED_VECTOR_ACTIVE);
692   CeedOperatorSetField(op_setuprhs, "weight", Erestrictxi, basisx,
693                        CEED_VECTOR_NONE);
694   CeedOperatorSetField(op_setuprhs, "true_soln", Erestrictui,
695                        CEED_BASIS_COLLOCATED, target);
696   CeedOperatorSetField(op_setuprhs, "rhs", Erestrictu, basisu,
697                        CEED_VECTOR_ACTIVE);
698 
699   // Create the mass or diff operator
700   CeedOperatorCreate(ceed, qf_apply, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
701                      &op_apply);
702   CeedOperatorSetField(op_apply, "u", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
703   CeedOperatorSetField(op_apply, "qdata", Erestrictqdi, CEED_BASIS_COLLOCATED,
704                        qdata);
705   CeedOperatorSetField(op_apply, "v", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
706 
707   // Create the error operator
708   CeedOperatorCreate(ceed, qf_error, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE,
709                      &op_error);
710   CeedOperatorSetField(op_error, "u", Erestrictu, basisu, CEED_VECTOR_ACTIVE);
711   CeedOperatorSetField(op_error, "true_soln", Erestrictui,
712                        CEED_BASIS_COLLOCATED, target);
713   CeedOperatorSetField(op_error, "error", Erestrictui, CEED_BASIS_COLLOCATED,
714                        CEED_VECTOR_ACTIVE);
715 
  // Set up Mat
  // The user context bundles everything the shell MatMult callbacks need:
  // scatters, local work vectors, libCEED vectors, and the apply operator.
  ierr = PetscMalloc1(1, &user); CHKERRQ(ierr);
  user->comm = comm;
  user->ltog = ltog;
  if (bpChoice != CEED_BP1 && bpChoice != CEED_BP2) {
    // Diff problems (BP3-BP6) also carry ltog0 and gtogD; presumably these
    // enforce the essential (Dirichlet) boundary conditions inside
    // MatMult_Diff -- confirm against the MatMult implementations above.
    user->ltog0 = ltog0;
    user->gtogD = gtogD;
  }
  user->Xloc = Xloc;
  ierr = VecDuplicate(Xloc, &user->Yloc); CHKERRQ(ierr);
  // Device-side mirrors of the local vectors, sized lnodes * components.
  CeedVectorCreate(ceed, lsize*ncompu, &user->xceed);
  CeedVectorCreate(ceed, lsize*ncompu, &user->yceed);
  user->op = op_apply;
  user->qdata = qdata;
  user->ceed = ceed;

  // Matrix-free shell operator: local row/column counts are the owned
  // tensor-product nodes times the number of components; global sizes are
  // derived by PETSc (PETSC_DECIDE).
  ierr = MatCreateShell(comm, mnodes[0]*mnodes[1]*mnodes[2]*ncompu,
                        mnodes[0]*mnodes[1]*mnodes[2]*ncompu,
                        PETSC_DECIDE, PETSC_DECIDE, user, &mat); CHKERRQ(ierr);
  if (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) {
    ierr = MatShellSetOperation(mat, MATOP_MULT, (void(*)(void))MatMult_Mass);
    CHKERRQ(ierr);
  } else {
    ierr = MatShellSetOperation(mat, MATOP_MULT, (void(*)(void))MatMult_Diff);
    CHKERRQ(ierr);
  }
  ierr = MatCreateVecs(mat, &rhs, NULL); CHKERRQ(ierr);
743 
  // Get RHS vector
  // Expose the local PETSc array to libCEED zero-copy (CEED_USE_POINTER) so
  // op_setuprhs writes directly into rhsloc's storage.
  ierr = VecDuplicate(Xloc, &rhsloc); CHKERRQ(ierr);
  ierr = VecZeroEntries(rhsloc); CHKERRQ(ierr);
  ierr = VecGetArray(rhsloc, &r); CHKERRQ(ierr);
  CeedVectorSetArray(rhsceed, CEED_MEM_HOST, CEED_USE_POINTER, r);

  // Setup qdata, rhs, and target
  // First pass computes geometric factors into qdata; second evaluates the
  // RHS (and fills the passive target vector with the true solution).
  // SyncArray ensures host memory is valid before the array is restored.
  CeedOperatorApply(op_setupgeo, xcoord, qdata, CEED_REQUEST_IMMEDIATE);
  CeedOperatorApply(op_setuprhs, xcoord, rhsceed, CEED_REQUEST_IMMEDIATE);
  ierr = CeedVectorSyncArray(rhsceed, CEED_MEM_HOST); CHKERRQ(ierr);
  CeedVectorDestroy(&xcoord);

  // Gather RHS
  // ADD_VALUES accumulates contributions to nodes shared between ranks.
  ierr = VecRestoreArray(rhsloc, &r); CHKERRQ(ierr);
  ierr = VecZeroEntries(rhs); CHKERRQ(ierr);
  ierr = VecScatterBegin(ltog, rhsloc, rhs, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  ierr = VecScatterEnd(ltog, rhsloc, rhs, ADD_VALUES, SCATTER_FORWARD);
  CHKERRQ(ierr);
  CeedVectorDestroy(&rhsceed);
764 
  // Configure the Krylov solver: CG with a problem-dependent preconditioner.
  ierr = KSPCreate(comm, &ksp); CHKERRQ(ierr);
  {
    PC pc;
    ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
    if (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) {
      // Mass problems: row-sum (lumped) Jacobi works without assembling
      // the matrix-free operator's diagonal.
      ierr = PCSetType(pc, PCJACOBI); CHKERRQ(ierr);
      ierr = PCJacobiSetType(pc, PC_JACOBI_ROWSUM); CHKERRQ(ierr);
    } else {
      // Diff problems run unpreconditioned here.
      ierr = PCSetType(pc, PCNONE); CHKERRQ(ierr);
    }
    ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr);
    // NOTE(review): KSP_NORM_NATURAL reuses CG's inner product for the
    // convergence test, avoiding an extra reduction per iteration.
    ierr = KSPSetNormType(ksp, KSP_NORM_NATURAL); CHKERRQ(ierr);
    ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT,
                            PETSC_DEFAULT); CHKERRQ(ierr);
  }
  // Command-line options may override the settings above.
  ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp, mat, mat); CHKERRQ(ierr);
  // First run, if benchmarking
  // Warm-up solve capped at one iteration; its timing picks maxits for the
  // timed solve so benchmark runs have a comparable duration.
  if (benchmark_mode) {
    ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1);
    CHKERRQ(ierr);
    my_rt_start = MPI_Wtime();
    ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
    my_rt = MPI_Wtime() - my_rt_start;
    // Allreduce (MIN) gives every rank the same timing, hence the same
    // maxits choice below.
    ierr = MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm);
    CHKERRQ(ierr);
    // Set maxits based on first iteration timing
    if (my_rt > 0.02) {
      // Slow iterations: keep the timed run short.
      ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 5);
      CHKERRQ(ierr);
    } else {
      // Fast iterations: run longer for a more stable rate measurement.
      ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20);
      CHKERRQ(ierr);
    }
  }
  // Timed solve
  // Barrier aligns all ranks so MPI_Wtime measures the solve alone.
  ierr = PetscBarrier((PetscObject)ksp); CHKERRQ(ierr);
  my_rt_start = MPI_Wtime();
  ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr);
  my_rt = MPI_Wtime() - my_rt_start;
  {
    KSPType ksptype;
    KSPConvergedReason reason;
    PetscReal rnorm;
    PetscInt its;
    ierr = KSPGetType(ksp, &ksptype); CHKERRQ(ierr);
    ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr);
    ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
    ierr = KSPGetResidualNorm(ksp, &rnorm); CHKERRQ(ierr);
    // In test mode stay quiet unless the solve diverged (reason < 0) or the
    // residual is suspiciously large.
    if (!test_mode || reason < 0 || rnorm > 1e-8) {
      ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm);
      CHKERRQ(ierr);
      ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm);
      CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  KSP:\n"
                         "    KSP Type                           : %s\n"
                         "    KSP Convergence                    : %s\n"
                         "    Total KSP Iterations               : %D\n"
                         "    Final rnorm                        : %e\n",
                         ksptype, KSPConvergedReasons[reason], its,
                         (double)rnorm); CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  Performance:\n"
                         "    CG Solve Time                      : %g (%g) sec\n"
                         "    DoFs/Sec in CG                     : %g (%g) million\n",
                         rt_max, rt_min, 1e-6*gsize*its/rt_max,
                         1e-6*gsize*its/rt_min); CHKERRQ(ierr);
    }
    // NOTE(review): when benchmark_mode && !test_mode, both branches run and
    // the "Performance" section prints twice -- looks unintentional; verify
    // whether the print above should be gated on !benchmark_mode.
    if (benchmark_mode && (!test_mode)) {
      ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm);
      CHKERRQ(ierr);
      ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm);
      CHKERRQ(ierr);
      ierr = PetscPrintf(comm,
                         "  Performance:\n"
                         "    CG Solve Time                      : %g (%g) sec\n"
                         "    DoFs/Sec in CG                     : %g (%g) million\n",
                         rt_max, rt_min, 1e-6*gsize*its/rt_max,
                         1e-6*gsize*its/rt_min); CHKERRQ(ierr);
    }
  }
847 
  {
    // Report the maximum pointwise error; in test mode only print when it
    // exceeds the per-problem tolerance (mass BPs are held to a tighter
    // tolerance than diff BPs).
    PetscReal maxerror;
    ierr = ComputeErrorMax(user, op_error, X, target, &maxerror); CHKERRQ(ierr);
    PetscReal tol = (bpChoice == CEED_BP1 || bpChoice == CEED_BP2) ? 5e-3 : 5e-2;
    if (!test_mode || maxerror > tol) {
      ierr = PetscPrintf(comm,
                         "    Pointwise Error (max)              : %e\n",
                         (double)maxerror); CHKERRQ(ierr);
    }
  }

  // Optionally dump the solution for visualization.
  if (write_solution) {
    PetscViewer vtkviewersoln;

    ierr = PetscViewerCreate(comm, &vtkviewersoln); CHKERRQ(ierr);
    ierr = PetscViewerSetType(vtkviewersoln, PETSCVIEWERVTK); CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(vtkviewersoln, "solution.vtk"); CHKERRQ(ierr);
    ierr = VecView(X, vtkviewersoln); CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&vtkviewersoln); CHKERRQ(ierr);
  }
868 
  // Cleanup: PETSc objects first, then libCEED objects, then the context.
  // Note Xloc is owned by the context and released via user->Xloc.
  ierr = VecDestroy(&rhs); CHKERRQ(ierr);
  ierr = VecDestroy(&rhsloc); CHKERRQ(ierr);
  ierr = VecDestroy(&X); CHKERRQ(ierr);
  ierr = VecDestroy(&user->Xloc); CHKERRQ(ierr);
  ierr = VecDestroy(&user->Yloc); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ltog); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ltog0); CHKERRQ(ierr);
  ierr = VecScatterDestroy(&gtogD); CHKERRQ(ierr);
  ierr = MatDestroy(&mat); CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp); CHKERRQ(ierr);

  // qdata is owned by the context and released via user->qdata.
  CeedVectorDestroy(&user->xceed);
  CeedVectorDestroy(&user->yceed);
  CeedVectorDestroy(&user->qdata);
  CeedVectorDestroy(&target);
  CeedOperatorDestroy(&op_setupgeo);
  CeedOperatorDestroy(&op_setuprhs);
  CeedOperatorDestroy(&op_apply);
  CeedOperatorDestroy(&op_error);
  CeedElemRestrictionDestroy(&Erestrictu);
  CeedElemRestrictionDestroy(&Erestrictx);
  CeedElemRestrictionDestroy(&Erestrictui);
  CeedElemRestrictionDestroy(&Erestrictxi);
  CeedElemRestrictionDestroy(&Erestrictqdi);
  CeedQFunctionDestroy(&qf_setupgeo);
  CeedQFunctionDestroy(&qf_setuprhs);
  CeedQFunctionDestroy(&qf_apply);
  CeedQFunctionDestroy(&qf_error);
  CeedBasisDestroy(&basisu);
  CeedBasisDestroy(&basisx);
  CeedDestroy(&ceed);
  ierr = PetscFree(user); CHKERRQ(ierr);
  // PetscFinalize returns the error code expected from main.
  return PetscFinalize();
}
903