// Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
// the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
// reserved. See files LICENSE and NOTICE for details.
//
// This file is part of CEED, a collection of benchmarks, miniapps, software
// libraries and APIs for efficient high-order finite element and spectral
// element discretizations for exascale applications. For more information and
// source code availability see http://github.com/ceed.
//
// The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
// a collaborative effort of two U.S. Department of Energy organizations (Office
// of Science and the National Nuclear Security Administration) responsible for
// the planning and preparation of a capable exascale ecosystem, including
// software, applications, hardware, advanced system engineering and early
// testbed platforms, in support of the nation's exascale computing imperative.

/// @file
/// Setup DM for Navier-Stokes example using PETSc

#include "../navierstokes.h"
22 // Read mesh and distribute DM in parallel
23 PetscErrorCode CreateDistributedDM(MPI_Comm comm, ProblemData *problem,
24                                    SetupContext setup_ctx, DM *dm) {
25   DM               dist_mesh = NULL;
26   PetscPartitioner part;
27   PetscInt         dim = problem->dim, faces[3] = {3, 3, 3};
28   const PetscReal  scale[3] = {setup_ctx->lx, setup_ctx->ly, setup_ctx->lz};
29   PetscErrorCode   ierr;
30   PetscFunctionBeginUser;
31 
32   ierr = PetscOptionsGetIntArray(NULL, NULL, "-dm_plex_box_faces",
33                                  faces, &dim, NULL); CHKERRQ(ierr);
34   if (!dim) dim = problem->dim;
35   ierr = DMPlexCreateBoxMesh(comm, dim, PETSC_FALSE, faces, NULL, scale,
36                              NULL, PETSC_TRUE, dm); CHKERRQ(ierr);
37 
38   // Distribute DM in parallel
39   ierr = DMPlexGetPartitioner(*dm, &part); CHKERRQ(ierr);
40   ierr = PetscPartitionerSetFromOptions(part); CHKERRQ(ierr);
41   ierr = DMPlexDistribute(*dm, 0, NULL, &dist_mesh); CHKERRQ(ierr);
42   if (dist_mesh) {
43     ierr = DMDestroy(dm); CHKERRQ(ierr);
44     *dm  = dist_mesh;
45   }
46   ierr = DMViewFromOptions(*dm, NULL, "-dm_view"); CHKERRQ(ierr);
47 
48   PetscFunctionReturn(0);
49 }
50 
51 // Setup DM
52 PetscErrorCode SetUpDM(DM dm, ProblemData *problem, PetscInt degree,
53                        SimpleBC bc, Physics phys, void *setup_ctx) {
54   PetscErrorCode ierr;
55   PetscFunctionBeginUser;
56   {
57     // Configure the finite element space and boundary conditions
58     PetscFE  fe;
59     PetscInt num_comp_q = 5;
60     ierr = PetscFECreateLagrange(PETSC_COMM_SELF, problem->dim, num_comp_q,
61                                  PETSC_FALSE, degree, PETSC_DECIDE,
62                                  &fe); CHKERRQ(ierr);
63     ierr = PetscObjectSetName((PetscObject)fe, "Q"); CHKERRQ(ierr);
64     ierr = DMAddField(dm, NULL,(PetscObject)fe); CHKERRQ(ierr);
65     ierr = DMCreateDS(dm); CHKERRQ(ierr);
66     ierr = problem->bc_func(dm, bc, phys, setup_ctx);
67     ierr = DMPlexSetClosurePermutationTensor(dm, PETSC_DETERMINE, NULL);
68     CHKERRQ(ierr);
69     ierr = PetscFEDestroy(&fe); CHKERRQ(ierr);
70   }
71   {
72     // Empty name for conserved field (because there is only one field)
73     PetscSection section;
74     ierr = DMGetLocalSection(dm, &section); CHKERRQ(ierr);
75     ierr = PetscSectionSetFieldName(section, 0, ""); CHKERRQ(ierr);
76     ierr = PetscSectionSetComponentName(section, 0, 0, "Density");
77     CHKERRQ(ierr);
78     ierr = PetscSectionSetComponentName(section, 0, 1, "MomentumX");
79     CHKERRQ(ierr);
80     ierr = PetscSectionSetComponentName(section, 0, 2, "MomentumY");
81     CHKERRQ(ierr);
82     ierr = PetscSectionSetComponentName(section, 0, 3, "MomentumZ");
83     CHKERRQ(ierr);
84     ierr = PetscSectionSetComponentName(section, 0, 4, "EnergyDensity");
85     CHKERRQ(ierr);
86   }
87   PetscFunctionReturn(0);
88 }
89 
90 // Refine DM for high-order viz
91 PetscErrorCode VizRefineDM(DM dm, User user, ProblemData *problem,
92                            SimpleBC bc, Physics phys, void *setup_ctx) {
93   PetscErrorCode ierr;
94   DM             dm_hierarchy[user->app_ctx->viz_refine + 1];
95   VecType        vec_type;
96   PetscFunctionBeginUser;
97 
98   ierr = DMPlexSetRefinementUniform(dm, PETSC_TRUE); CHKERRQ(ierr);
99 
100   dm_hierarchy[0] = dm;
101   for (PetscInt i = 0, d = user->app_ctx->degree;
102        i < user->app_ctx->viz_refine; i++) {
103     Mat interp_next;
104     ierr = DMRefine(dm_hierarchy[i], MPI_COMM_NULL, &dm_hierarchy[i+1]);
105     CHKERRQ(ierr);
106     ierr = DMClearDS(dm_hierarchy[i+1]); CHKERRQ(ierr);
107     ierr = DMClearFields(dm_hierarchy[i+1]); CHKERRQ(ierr);
108     ierr = DMSetCoarseDM(dm_hierarchy[i+1], dm_hierarchy[i]); CHKERRQ(ierr);
109     d = (d + 1) / 2;
110     if (i + 1 == user->app_ctx->viz_refine) d = 1;
111     ierr = DMGetVecType(dm, &vec_type); CHKERRQ(ierr);
112     ierr = DMSetVecType(dm_hierarchy[i+1], vec_type); CHKERRQ(ierr);
113     ierr = SetUpDM(dm_hierarchy[i+1], problem, d, bc, phys, setup_ctx);
114     CHKERRQ(ierr);
115     ierr = DMCreateInterpolation(dm_hierarchy[i], dm_hierarchy[i+1], &interp_next,
116                                  NULL); CHKERRQ(ierr);
117     if (!i) user->interp_viz = interp_next;
118     else {
119       Mat C;
120       ierr = MatMatMult(interp_next, user->interp_viz, MAT_INITIAL_MATRIX,
121                         PETSC_DECIDE, &C); CHKERRQ(ierr);
122       ierr = MatDestroy(&interp_next); CHKERRQ(ierr);
123       ierr = MatDestroy(&user->interp_viz); CHKERRQ(ierr);
124       user->interp_viz = C;
125     }
126   }
127   for (PetscInt i=1; i<user->app_ctx->viz_refine; i++) {
128     ierr = DMDestroy(&dm_hierarchy[i]); CHKERRQ(ierr);
129   }
130   user->dm_viz = dm_hierarchy[user->app_ctx->viz_refine];
131 
132   PetscFunctionReturn(0);
133 }
134