/*
Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

Problem 1: (Default)

  Use the following exact solution with Dirichlet boundary condition

    u = sin(pi*x)*sin(pi*y)

  and generate an appropriate forcing function to measure convergence.
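
  For constant \rho on the unit square, substituting u into the PDE gives the forcing function

    f = 2*\rho*\pi^2*sin(\pi*x)*sin(\pi*y)

  which ComputeForcingFunction below generalizes to a [0, LX] x [0, LY] domain.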

Usage:

  Measure the convergence rate under uniform refinement with the options "-problem 1 -error":

    mpiexec -n $NP ./ex35 -problem 1 -error -n 16 -levels 5 -pc_type gamg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 32 -levels 4 -pc_type gamg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 64 -levels 3 -pc_type mg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 128 -levels 2 -pc_type mg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 256 -levels 1 -mg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 512 -levels 0 -mg
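
  Each of the above lines resolves the same finest grid: n * 2^levels = 512 elements per direction.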

  Or with an external mesh file representing [0, 1]^2,

    mpiexec -n $NP ./ex35 -problem 1 -file ./external_mesh.h5m -levels 2 -error -pc_type mg

Problem 2:

  Use the forcing function

    f = e^{-((x-xr)^2+(y-yr)^2)/\nu}

  with Dirichlet boundary conditions

    u = f(x,y) for x = 0, x = 1, y = 0, y = 1

  or pure Neumann boundary conditions
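  (in the Neumann case the operator has a constant null space, which is removed from the
  matrix and the right-hand side in ComputeMatrix and ComputeRHS below).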

Usage:

  Run with different values of \rho and \nu (problem 2) to control the diffusion coefficient and the spread of the Gaussian source. This uses the internal mesh generator implemented in DMMoab.

    mpiexec -n $NP ./ex35 -problem 2 -n 20 -nu 0.02 -rho 0.01
    mpiexec -n $NP ./ex35 -problem 2 -n 40 -nu 0.01 -rho 0.005 -io -ksp_monitor
    mpiexec -n $NP ./ex35 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type hypre
    mpiexec -n $NP ./ex35 -problem 2 -n 160 -bc neumann -nu 0.005 -rho 0.01 -io
    mpiexec -n $NP ./ex35 -problem 2 -n 320 -bc neumann -nu 0.001 -rho 1 -io

  Or with an external mesh file representing [0, 1]^2,

    mpiexec -n $NP ./ex35 -problem 2 -file ./external_mesh.h5m -levels 1 -pc_type gamg

Problem 3:

  Use the forcing function

    f = \nu*sin(\pi*x/LX)*sin(\pi*y/LY)

  with Dirichlet boundary conditions

    u = 0.0, for x = 0, x = 1, y = 0, y = 1 (outer boundary) and
    u = 1.0, for (x-0.5)^2 + (y-0.5)^2 = 0.01

Usage:

  Load an external MOAB mesh that discretizes the square domain with a circular hole and use it to run the solver.

    mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -mg
    mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -mg -levels 2 -io -ksp_monitor
    mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -io -ksp_monitor -pc_type hypre

*/

static char help[] = "\
Solves the 2D inhomogeneous Laplacian equation.\n \
Usage: ./ex35 -problem 1 -error -n 4 -levels 3 -mg\n \
Usage: ./ex35 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type gamg\n \
Usage: ./ex35 -problem 3 -file input/square_with_hole.h5m -mg\n";

/* PETSc includes */
#include <petscksp.h>
#include <petscdmmoab.h>

#define LOCAL_ASSEMBLY

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscInt  dim, n, problem, nlevels;
  PetscReal rho;
  PetscReal bounds[6];
  PetscReal xref, yref;
  PetscReal nu;
  PetscInt  VPERE;
  BCType    bcType;
  PetscBool use_extfile, io, error, usetri, usemg;
  char      filename[PETSC_MAX_PATH_LEN];
} UserContext;

static PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS(KSP, Vec, void *);
static PetscErrorCode ComputeDiscreteL2Error(KSP, Vec, UserContext *);
static PetscErrorCode InitializeOptions(UserContext *);

int main(int argc, char **argv)
{
  KSP ksp;
  PC  pc;
  Mat R;
  DM  dm, dmref, *dmhierarchy;

  UserContext user;
  const char *fields[1] = {"T-Variable"};
  PetscInt    k;
  Vec         b, x, errv;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, nullptr, help));

  PetscCall(InitializeOptions(&user));

  /* Create the DM object from either a mesh file or from an in-memory structured grid */
  if (user.use_extfile) {
    PetscCall(DMMoabLoadFromFile(PETSC_COMM_WORLD, user.dim, 1, user.filename, "", &dm));
  } else {
    PetscCall(DMMoabCreateBoxMesh(PETSC_COMM_WORLD, user.dim, user.usetri, user.bounds, user.n, 1, &dm));
  }
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMMoabSetFieldNames(dm, 1, fields));

  /* SetUp the data structures for DMMOAB */
  PetscCall(DMSetUp(dm));

  PetscCall(DMSetApplicationContext(dm, &user));

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, &user));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, &user));

  if (user.nlevels) {
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PetscMalloc(sizeof(DM) * (user.nlevels + 1), &dmhierarchy));
    for (k = 0; k <= user.nlevels; k++) dmhierarchy[k] = NULL;

    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of mesh hierarchy levels: %" PetscInt_FMT "\n", user.nlevels));
    PetscCall(DMMoabGenerateHierarchy(dm, user.nlevels, NULL));

    /* coarsest grid = 0, finest grid = nlevels */
    dmhierarchy[0]         = dm;
    PetscBool usehierarchy = PETSC_FALSE;
    if (usehierarchy) {
      PetscCall(DMRefineHierarchy(dm, user.nlevels, &dmhierarchy[1]));
    } else {
      for (k = 1; k <= user.nlevels; k++) PetscCall(DMRefine(dmhierarchy[k - 1], MPI_COMM_NULL, &dmhierarchy[k]));
    }
    dmref = dmhierarchy[user.nlevels];
    PetscCall(PetscObjectReference((PetscObject)dmref));

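    /* Configure geometric multigrid when requested: multiplicative V-cycles with two
       smoother sweeps per level; PC_MG_GALERKIN_BOTH forms the coarse-level operators
       by Galerkin projection (R^T A R) of the finest-level matrix, so ComputeMatrix
       only needs to discretize the finest grid */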
    if (user.usemg) {
      PetscCall(PCSetType(pc, PCMG));
      PetscCall(PCMGSetLevels(pc, user.nlevels + 1, NULL));
      PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
      PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_V));
      PetscCall(PCMGSetNumberSmooth(pc, 2));

      for (k = 1; k <= user.nlevels; k++) {
        PetscCall(DMCreateInterpolation(dmhierarchy[k - 1], dmhierarchy[k], &R, NULL));
        PetscCall(PCMGSetInterpolation(pc, k, R));
        PetscCall(MatDestroy(&R));
      }
    }

    for (k = 1; k <= user.nlevels; k++) PetscCall(DMDestroy(&dmhierarchy[k]));
    PetscCall(PetscFree(dmhierarchy));
  } else {
    dmref = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  }

  PetscCall(KSPSetDM(ksp, dmref));
  PetscCall(KSPSetFromOptions(ksp));

  /* Perform the actual solve */
  PetscCall(KSPSolve(ksp, NULL, NULL));
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &b));

  if (user.error) {
    PetscCall(VecDuplicate(b, &errv));
    PetscCall(ComputeDiscreteL2Error(ksp, errv, &user));
    PetscCall(VecDestroy(&errv));
  }

  if (user.io) {
    /* Write out the solution along with the mesh */
    PetscCall(DMMoabSetGlobalFieldVector(dmref, x));
#ifdef MOAB_HAVE_HDF5
    PetscCall(DMMoabOutput(dmref, "ex35.h5m", NULL));
#else
    /* MOAB does not support truly parallel writers that are not HDF5 based.
       So if you use VTK as the output format in parallel, the data may be
       jumbled depending on the order in which the processors write out
       their parts of the mesh and solution tags */
    PetscCall(DMMoabOutput(dmref, "ex35.vtk", NULL));
#endif
  }

  /* Cleanup objects */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&dmref));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

PetscReal ComputeDiffusionCoefficient(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 2:
    /* piecewise-constant coefficient: rho inside the central third of the domain
       in each direction, 1.0 elsewhere */
    if ((coords[0] > user->bounds[1] / 3.0) && (coords[0] < 2.0 * user->bounds[1] / 3.0) && (coords[1] > user->bounds[3] / 3.0) && (coords[1] < 2.0 * user->bounds[3] / 3.0)) {
      return user->rho;
    } else {
      return 1.0;
    }
  case 1:
  case 3:
  default:
    return user->rho;
  }
}

PetscScalar ExactSolution(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 1:
    return sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  case 3:
  case 2:
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Exact solution for -problem = [%" PetscInt_FMT "] is not available.", user->problem);
  }
}

PetscScalar ComputeForcingFunction(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 3:
    return user->nu * sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  case 2:
    return PetscExpScalar(-((coords[0] - user->xref) * (coords[0] - user->xref) + (coords[1] - user->yref) * (coords[1] - user->yref)) / user->nu);
  case 1:
  default:
    /* manufactured forcing for the exact solution u = sin(pi*x/LX)*sin(pi*y/LY) */
    return PETSC_PI * PETSC_PI * ComputeDiffusionCoefficient(coords, user) * (1.0 / user->bounds[1] / user->bounds[1] + 1.0 / user->bounds[3] / user->bounds[3]) * sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  }
}

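/* BCHECK(c, t) is true when the coordinate c lies within BCHECKEPS of the boundary trace t */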
#define BCHECKEPS 1e-10
#define BCHECK(coordxyz, truetrace) (coordxyz < truetrace + BCHECKEPS && coordxyz > truetrace - BCHECKEPS)

PetscScalar EvaluateStrongDirichletCondition(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 3:
    if (BCHECK(coords[0], user->bounds[0]) || BCHECK(coords[0], user->bounds[1]) || BCHECK(coords[1], user->bounds[2]) || BCHECK(coords[1], user->bounds[3])) return 0.0;
    else // ( coords[0]*coords[0] + coords[1]*coords[1] < 0.04 + BCHECKEPS)
      return 1.0;
  case 2:
    return ComputeForcingFunction(coords, user);
  case 1:
  default:
    return ExactSolution(coords, user);
  }
}

PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ptr)
{
  UserContext              *user = (UserContext *)ptr;
  DM                        dm;
  PetscInt                  dof_indices[4];
  PetscBool                 dbdry[4];
  PetscReal                 vpos[4 * 3];
  PetscScalar               ff;
  PetscInt                  i, q, nconn, nc, npoints;
  const moab::EntityHandle *connect;
  const moab::Range        *elocal;
  moab::Interface          *mbImpl;
  PetscScalar               localv[4];
  PetscReal                *phi, *phypts, *jxw;
  PetscBool                 elem_on_boundary;
  PetscQuadrature           quadratureObj;

  PetscFunctionBegin;
  PetscCall(KSPGetDM(ksp, &dm));

  /* reset the RHS */
  PetscCall(VecSet(b, 0.0));

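  /* create the default quadrature rule for the reference element (TRI3/QUAD4) along with
     workspace for the basis values, mapped quadrature points, and Jacobian-weighted weights */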
  PetscCall(DMMoabFEMCreateQuadratureDefault(2, user->VPERE, &quadratureObj));
  PetscCall(PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL));
  PetscCall(PetscMalloc3(user->VPERE * npoints, &phi, npoints * 3, &phypts, npoints, &jxw));

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  PetscCall(DMMoabGetInterface(dm, &mbImpl));
  PetscCall(DMMoabGetLocalElements(dm, &elocal));

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    /* Get connectivity information: */
    PetscCall(DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect));
    PetscCheck(nconn == 3 || nconn == 4, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Only TRI3/QUAD4 element bases are supported in the current example. n(Connectivity)=%" PetscInt_FMT ".", nconn);

    PetscCall(PetscArrayzero(localv, nconn));

    /* get the coordinates of the element vertices */
    PetscCall(DMMoabGetVertexCoordinates(dm, nconn, connect, vpos));

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
#ifdef LOCAL_ASSEMBLY
    PetscCall(DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices));
#else
    PetscCall(DMMoabGetFieldDofs(dm, nconn, connect, 0, dof_indices));
#endif

    /* 1) compute the basis functions and the derivatives wrt x and y directions
       2) compute the quadrature points transformed to the physical space */
    PetscCall(DMMoabFEMComputeBasis(2, nconn, vpos, quadratureObj, phypts, jxw, phi, NULL));

    /* assemble the element load vector: integrate the forcing function against each basis function */
    for (q = 0; q < npoints; ++q) {
      ff = ComputeForcingFunction(&phypts[3 * q], user);

      for (i = 0; i < nconn; ++i) localv[i] += jxw[q] * phi[q * nconn + i] * ff;
    }

    /* check if element is on the boundary */
    PetscCall(DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary));

    /* apply Dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on the boundary so that we can enforce Dirichlet conditions strongly */
      PetscCall(DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry));

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* Dirichlet node: overwrite the load entry with the prescribed boundary value */
          localv[i] = EvaluateStrongDirichletCondition(&vpos[3 * i], user);
        }
      }
    }

#ifdef LOCAL_ASSEMBLY
    /* set the values directly into appropriate locations. Can alternately use VecSetValues */
    PetscCall(VecSetValuesLocal(b, nconn, dof_indices, localv, ADD_VALUES));
#else
    PetscCall(VecSetValues(b, nconn, dof_indices, localv, ADD_VALUES));
#endif
  }

  /* force the right-hand side to be consistent for the singular (Neumann) operator;
     note this is really a hack -- normally the model would provide a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;
    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatNullSpaceRemove(nullspace, b));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }

  /* Assemble the RHS vector */
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscCall(PetscFree3(phi, phypts, jxw));
  PetscCall(PetscQuadratureDestroy(&quadratureObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  UserContext              *user = (UserContext *)ctx;
  DM                        dm;
  PetscInt                  i, j, q, nconn, nglobale, nglobalv, nc, npoints, hlevel;
  PetscInt                  dof_indices[4];
  PetscReal                 vpos[4 * 3], rho;
  PetscBool                 dbdry[4];
  const moab::EntityHandle *connect;
  const moab::Range        *elocal;
  moab::Interface          *mbImpl;
  PetscBool                 elem_on_boundary;
  PetscScalar               array[4 * 4];
  PetscReal                *phi, *dphi[2], *phypts, *jxw;
  PetscQuadrature           quadratureObj;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &dm));

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  PetscCall(DMMoabGetInterface(dm, &mbImpl));
  PetscCall(DMMoabGetLocalElements(dm, &elocal));
  PetscCall(DMMoabGetSize(dm, &nglobale, &nglobalv));
  PetscCall(DMMoabGetHierarchyLevel(dm, &hlevel));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "ComputeMatrix: Level = %" PetscInt_FMT ", N(elements) = %" PetscInt_FMT ", N(vertices) = %" PetscInt_FMT " \n", hlevel, nglobale, nglobalv));

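  /* quadrature rule and workspace: basis values (phi), basis gradients in x and y
     (dphi[0], dphi[1]), mapped quadrature points, and Jacobian-weighted weights */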
  PetscCall(DMMoabFEMCreateQuadratureDefault(2, user->VPERE, &quadratureObj));
  PetscCall(PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL));
  PetscCall(PetscMalloc5(user->VPERE * npoints, &phi, user->VPERE * npoints, &dphi[0], user->VPERE * npoints, &dphi[1], npoints * 3, &phypts, npoints, &jxw));

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    // Get connectivity information:
    PetscCall(DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect));
    PetscCheck(nconn == 3 || nconn == 4, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Only QUAD4 or TRI3 element bases are supported in the current example. Connectivity=%" PetscInt_FMT ".", nconn);

    /* get the coordinates of the element vertices */
    PetscCall(DMMoabGetVertexCoordinates(dm, nconn, connect, vpos));

    /* get the DoF numbers to appropriately set the element contribution in the operator */
#ifdef LOCAL_ASSEMBLY
    PetscCall(DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices));
#else
    PetscCall(DMMoabGetFieldDofs(dm, nconn, connect, 0, dof_indices));
#endif

    /* 1) compute the basis functions and the derivatives wrt x and y directions
       2) compute the quadrature points transformed to the physical space */
    PetscCall(DMMoabFEMComputeBasis(2, nconn, vpos, quadratureObj, phypts, jxw, phi, dphi));

    PetscCall(PetscArrayzero(array, nconn * nconn));

    /* assemble the element stiffness matrix: array[i][j] = int_K rho * grad(phi_i) . grad(phi_j) */
    for (q = 0; q < npoints; ++q) {
      /* compute the inhomogeneous (piecewise constant) diffusion coefficient at the quadrature point
         -- for large spatial variations (within an element), embed this property evaluation inside the quadrature loop
      */
      rho = ComputeDiffusionCoefficient(&phypts[q * 3], user);

      for (i = 0; i < nconn; ++i) {
        for (j = 0; j < nconn; ++j) array[i * nconn + j] += jxw[q] * rho * (dphi[0][q * nconn + i] * dphi[0][q * nconn + j] + dphi[1][q * nconn + i] * dphi[1][q * nconn + j]);
      }
    }

    /* check if element is on the boundary */
    PetscCall(DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary));

    /* apply Dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on the boundary so that we can enforce Dirichlet conditions strongly */
      PetscCall(DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry));

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* Dirichlet node: zero the row and place a unit diagonal; the RHS carries the boundary value */
          for (j = 0; j < nconn; ++j) {
            /* TODO: symmetrize the system - need the RHS */
            array[i * nconn + j] = 0.0;
          }
          array[i * nconn + i] = 1.0;
        }
      }
    }

    /* set the values directly into appropriate locations. */
#ifdef LOCAL_ASSEMBLY
    PetscCall(MatSetValuesLocal(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES));
#else
    PetscCall(MatSetValues(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES));
#endif
  }

  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));

  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatSetNullSpace(J, nullspace));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscCall(PetscFree5(phi, dphi[0], dphi[1], phypts, jxw));
  PetscCall(PetscQuadratureDestroy(&quadratureObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeDiscreteL2Error(KSP ksp, Vec err, UserContext *user)
{
  DM                 dm;
  Vec                sol;
  PetscReal          vpos[3];
  const PetscScalar *x;
  PetscScalar       *e;
  PetscReal          l2err = 0.0, linferr = 0.0, global_l2, global_linf;
  PetscInt           dof_index, N;
  const moab::Range *ownedvtx;

  PetscFunctionBegin;
  PetscCall(KSPGetDM(ksp, &dm));

  /* get the solution vector */
  PetscCall(KSPGetSolution(ksp, &sol));

  /* Get the internal reference to the vector arrays */
  PetscCall(VecGetArrayRead(sol, &x));
  PetscCall(VecGetSize(sol, &N));
  if (err) {
    /* reset the error vector */
    PetscCall(VecSet(err, 0.0));
    /* get array reference */
    PetscCall(VecGetArray(err, &e));
  }

  PetscCall(DMMoabGetLocalVertices(dm, &ownedvtx, NULL));

  /* Compute the pointwise error over the locally owned vertices */
  for (moab::Range::iterator iter = ownedvtx->begin(); iter != ownedvtx->end(); iter++) {
    const moab::EntityHandle vhandle = *iter;

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
#ifdef LOCAL_ASSEMBLY
    PetscCall(DMMoabGetFieldDofsLocal(dm, 1, &vhandle, 0, &dof_index));
#else
    PetscCall(DMMoabGetFieldDofs(dm, 1, &vhandle, 0, &dof_index));
#endif

    /* get the coordinates of the vertex */
    PetscCall(DMMoabGetVertexCoordinates(dm, 1, &vhandle, vpos));

    /* compute the discrete L2 error against the exact solution */
    const PetscScalar lerr = (ExactSolution(vpos, user) - x[dof_index]);
    l2err += PetscRealPart(lerr * PetscConj(lerr));
    if (linferr < PetscAbsScalar(lerr)) linferr = PetscAbsScalar(lerr);

    if (err) { /* set the discrete L2 error against the exact solution */
      e[dof_index] = lerr;
    }
  }

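  /* accumulate the squared L2 error and the maximum pointwise error across all ranks */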
  PetscCallMPI(MPIU_Allreduce(&l2err, &global_l2, 1, MPIU_REAL, MPIU_SUM, PETSC_COMM_WORLD));
  PetscCallMPI(MPIU_Allreduce(&linferr, &global_linf, 1, MPIU_REAL, MPIU_MAX, PETSC_COMM_WORLD));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Computed Errors: L_2 = %f, L_inf = %f\n", (double)PetscSqrtReal(global_l2 / N), (double)global_linf));

  /* Restore the array references */
  PetscCall(VecRestoreArrayRead(sol, &x));
  if (err) PetscCall(VecRestoreArray(err, &e));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode InitializeOptions(UserContext *user)
{
  const char *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt    bc;

  PetscFunctionBegin;
  /* set default parameters */
  user->dim       = 2;
  user->problem   = 1;
  user->n         = 2;
  user->nlevels   = 2;
  user->rho       = 0.1;
  user->bounds[0] = user->bounds[2] = user->bounds[4] = 0.0;
  user->bounds[1] = user->bounds[3] = user->bounds[5] = 1.0;
  user->xref      = user->bounds[1] / 2;
  user->yref      = user->bounds[3] / 2;
  user->nu        = 0.05;
  user->usemg     = PETSC_FALSE;
  user->io        = PETSC_FALSE;
  user->usetri    = PETSC_FALSE;
  user->error     = PETSC_FALSE;
  bc              = (PetscInt)DIRICHLET;

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for the inhomogeneous Poisson equation", "ex35.cxx");
  PetscCall(PetscOptionsInt("-problem", "The type of problem being solved (controls the forcing function)", "ex35.cxx", user->problem, &user->problem, NULL));
  PetscCall(PetscOptionsInt("-n", "The number of elements in each direction", "ex35.cxx", user->n, &user->n, NULL));
  PetscCall(PetscOptionsInt("-levels", "Number of levels in the multigrid hierarchy", "ex35.cxx", user->nlevels, &user->nlevels, NULL));
  PetscCall(PetscOptionsReal("-rho", "The conductivity", "ex35.cxx", user->rho, &user->rho, NULL));
  PetscCall(PetscOptionsReal("-x", "The domain size in x-direction", "ex35.cxx", user->bounds[1], &user->bounds[1], NULL));
  PetscCall(PetscOptionsReal("-y", "The domain size in y-direction", "ex35.cxx", user->bounds[3], &user->bounds[3], NULL));
  PetscCall(PetscOptionsReal("-xref", "The x-coordinate of the Gaussian center (for -problem 2)", "ex35.cxx", user->xref, &user->xref, NULL));
  PetscCall(PetscOptionsReal("-yref", "The y-coordinate of the Gaussian center (for -problem 2)", "ex35.cxx", user->yref, &user->yref, NULL));
  PetscCall(PetscOptionsReal("-nu", "The width of the Gaussian source (for -problem 2)", "ex35.cxx", user->nu, &user->nu, NULL));
  PetscCall(PetscOptionsBool("-mg", "Use the multigrid preconditioner", "ex35.cxx", user->usemg, &user->usemg, NULL));
  PetscCall(PetscOptionsBool("-io", "Write out the solution and mesh data", "ex35.cxx", user->io, &user->io, NULL));
  PetscCall(PetscOptionsBool("-tri", "Use triangles to discretize the domain", "ex35.cxx", user->usetri, &user->usetri, NULL));
  PetscCall(PetscOptionsBool("-error", "Compute the discrete L_2 and L_inf errors of the solution", "ex35.cxx", user->error, &user->error, NULL));
  PetscCall(PetscOptionsEList("-bc", "Type of boundary condition", "ex35.cxx", bcTypes, 2, bcTypes[0], &bc, NULL));
  PetscCall(PetscOptionsString("-file", "The mesh file for the problem", "ex35.cxx", "", user->filename, sizeof(user->filename), &user->use_extfile));
  PetscOptionsEnd();

  if (user->problem < 1 || user->problem > 3) user->problem = 1;
  user->bcType = (BCType)bc;
  user->VPERE  = (user->usetri ? 3 : 4);
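  /* problem 3 uses a re-centered square domain, [-0.5, 0.5]^2, so adjust the bounding
     box used by the coefficient and boundary-condition evaluations */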
  if (user->problem == 3) {
    user->bounds[0] = user->bounds[2] = -0.5;
    user->bounds[1] = user->bounds[3] = 0.5;
    user->bounds[4] = user->bounds[5] = 0.5;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      requires: moab !complex

   test:
      args: -levels 0 -nu .01 -n 10 -ksp_type cg -pc_type sor -ksp_converged_reason

   test:
      suffix: 2
      nsize: 2
      requires: hdf5
      args: -levels 3 -nu .01 -n 2 -mg -ksp_converged_reason

   test:
      suffix: 3
      nsize: 2
      requires: hdf5
      args: -problem 3 -file data/ex35_mesh.h5m -mg -levels 1 -ksp_converged_reason
      localrunfiles: data

TEST*/