// Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
// the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
// reserved. See files LICENSE and NOTICE for details.
//
// This file is part of CEED, a collection of benchmarks, miniapps, software
// libraries and APIs for efficient high-order finite element and spectral
// element discretizations for exascale applications. For more information and
// source code availability see http://github.com/ceed.
//
// The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
// a collaborative effort of two U.S. Department of Energy organizations (Office
// of Science and the National Nuclear Security Administration) responsible for
// the planning and preparation of a capable exascale ecosystem, including
// software, applications, hardware, advanced system engineering and early
// testbed platforms, in support of the nation's exascale computing imperative.
16 17 /// @file 18 /// Setup DM for Navier-Stokes example using PETSc 19 20 #include "../navierstokes.h" 21 22 // Read mesh and distribute DM in parallel 23 PetscErrorCode CreateDistributedDM(MPI_Comm comm, ProblemData *problem, 24 SetupContext setup_ctx, DM *dm) { 25 DM dist_mesh = NULL; 26 PetscPartitioner part; 27 PetscInt dim = problem->dim, faces[3] = {3, 3, 3}; 28 const PetscReal scale[3] = {setup_ctx->lx, setup_ctx->ly, setup_ctx->lz}; 29 PetscErrorCode ierr; 30 PetscFunctionBeginUser; 31 32 ierr = PetscOptionsGetIntArray(NULL, NULL, "-dm_plex_box_faces", 33 faces, &dim, NULL); CHKERRQ(ierr); 34 ierr = DMPlexCreateBoxMesh(comm, dim, PETSC_FALSE, faces, NULL, scale, 35 NULL, PETSC_TRUE, dm); CHKERRQ(ierr); 36 37 // Distribute DM in parallel 38 ierr = DMPlexGetPartitioner(*dm, &part); CHKERRQ(ierr); 39 ierr = PetscPartitionerSetFromOptions(part); CHKERRQ(ierr); 40 ierr = DMPlexDistribute(*dm, 0, NULL, &dist_mesh); CHKERRQ(ierr); 41 if (dist_mesh) { 42 ierr = DMDestroy(dm); CHKERRQ(ierr); 43 *dm = dist_mesh; 44 } 45 ierr = DMViewFromOptions(*dm, NULL, "-dm_view"); CHKERRQ(ierr); 46 47 PetscFunctionReturn(0); 48 } 49 50 // Setup DM 51 PetscErrorCode SetUpDM(DM dm, ProblemData *problem, PetscInt degree, 52 SimpleBC bc, Physics phys, void *setup_ctx) { 53 PetscErrorCode ierr; 54 PetscFunctionBeginUser; 55 { 56 // Configure the finite element space and boundary conditions 57 PetscFE fe; 58 PetscInt num_comp_q = 5; 59 ierr = PetscFECreateLagrange(PETSC_COMM_SELF, problem->dim, num_comp_q, 60 PETSC_FALSE, degree, PETSC_DECIDE, 61 &fe); CHKERRQ(ierr); 62 ierr = PetscObjectSetName((PetscObject)fe, "Q"); CHKERRQ(ierr); 63 ierr = DMAddField(dm, NULL,(PetscObject)fe); CHKERRQ(ierr); 64 ierr = DMCreateDS(dm); CHKERRQ(ierr); 65 ierr = problem->bc_func(dm, bc, phys, setup_ctx); 66 ierr = DMPlexSetClosurePermutationTensor(dm, PETSC_DETERMINE, NULL); 67 CHKERRQ(ierr); 68 ierr = PetscFEDestroy(&fe); CHKERRQ(ierr); 69 } 70 { 71 // Empty name for conserved field (because there is 
only one field) 72 PetscSection section; 73 ierr = DMGetLocalSection(dm, §ion); CHKERRQ(ierr); 74 ierr = PetscSectionSetFieldName(section, 0, ""); CHKERRQ(ierr); 75 ierr = PetscSectionSetComponentName(section, 0, 0, "Density"); 76 CHKERRQ(ierr); 77 ierr = PetscSectionSetComponentName(section, 0, 1, "MomentumX"); 78 CHKERRQ(ierr); 79 ierr = PetscSectionSetComponentName(section, 0, 2, "MomentumY"); 80 CHKERRQ(ierr); 81 ierr = PetscSectionSetComponentName(section, 0, 3, "MomentumZ"); 82 CHKERRQ(ierr); 83 ierr = PetscSectionSetComponentName(section, 0, 4, "EnergyDensity"); 84 CHKERRQ(ierr); 85 } 86 PetscFunctionReturn(0); 87 } 88 89 // Refine DM for high-order viz 90 PetscErrorCode VizRefineDM(DM dm, User user, ProblemData *problem, 91 SimpleBC bc, Physics phys, void *setup_ctx) { 92 PetscErrorCode ierr; 93 DM dm_hierarchy[user->app_ctx->viz_refine + 1]; 94 VecType vec_type; 95 PetscFunctionBeginUser; 96 97 ierr = DMPlexSetRefinementUniform(dm, PETSC_TRUE); CHKERRQ(ierr); 98 99 dm_hierarchy[0] = dm; 100 for (PetscInt i = 0, d = user->app_ctx->degree; 101 i < user->app_ctx->viz_refine; i++) { 102 Mat interp_next; 103 ierr = DMRefine(dm_hierarchy[i], MPI_COMM_NULL, &dm_hierarchy[i+1]); 104 CHKERRQ(ierr); 105 ierr = DMClearDS(dm_hierarchy[i+1]); CHKERRQ(ierr); 106 ierr = DMClearFields(dm_hierarchy[i+1]); CHKERRQ(ierr); 107 ierr = DMSetCoarseDM(dm_hierarchy[i+1], dm_hierarchy[i]); CHKERRQ(ierr); 108 d = (d + 1) / 2; 109 if (i + 1 == user->app_ctx->viz_refine) d = 1; 110 ierr = DMGetVecType(dm, &vec_type); CHKERRQ(ierr); 111 ierr = DMSetVecType(dm_hierarchy[i+1], vec_type); CHKERRQ(ierr); 112 ierr = SetUpDM(dm_hierarchy[i+1], problem, d, bc, phys, setup_ctx); 113 CHKERRQ(ierr); 114 ierr = DMCreateInterpolation(dm_hierarchy[i], dm_hierarchy[i+1], &interp_next, 115 NULL); CHKERRQ(ierr); 116 if (!i) user->interp_viz = interp_next; 117 else { 118 Mat C; 119 ierr = MatMatMult(interp_next, user->interp_viz, MAT_INITIAL_MATRIX, 120 PETSC_DECIDE, &C); CHKERRQ(ierr); 121 ierr = 
MatDestroy(&interp_next); CHKERRQ(ierr); 122 ierr = MatDestroy(&user->interp_viz); CHKERRQ(ierr); 123 user->interp_viz = C; 124 } 125 } 126 for (PetscInt i=1; i<user->app_ctx->viz_refine; i++) { 127 ierr = DMDestroy(&dm_hierarchy[i]); CHKERRQ(ierr); 128 } 129 user->dm_viz = dm_hierarchy[user->app_ctx->viz_refine]; 130 131 PetscFunctionReturn(0); 132 } 133