// Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
// the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
// reserved. See files LICENSE and NOTICE for details.
//
// This file is part of CEED, a collection of benchmarks, miniapps, software
// libraries and APIs for efficient high-order finite element and spectral
// element discretizations for exascale applications. For more information and
// source code availability see http://github.com/ceed.
//
// The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
// a collaborative effort of two U.S. Department of Energy organizations (Office
// of Science and the National Nuclear Security Administration) responsible for
// the planning and preparation of a capable exascale ecosystem, including
// software, applications, hardware, advanced system engineering and early
// testbed platforms, in support of the nation's exascale computing imperative.
16 17 /// @file 18 /// Setup DM for Navier-Stokes example using PETSc 19 20 #include "../navierstokes.h" 21 22 // Read mesh and distribute DM in parallel 23 PetscErrorCode CreateDistributedDM(MPI_Comm comm, ProblemData *problem, 24 SetupContext setup_ctx, DM *dm) { 25 DM dist_mesh = NULL; 26 PetscPartitioner part; 27 PetscInt dim = problem->dim, faces[3] = {3, 3, 3}; 28 const PetscReal scale[3] = {setup_ctx->lx, setup_ctx->ly, setup_ctx->lz}; 29 PetscErrorCode ierr; 30 PetscFunctionBeginUser; 31 32 ierr = PetscOptionsGetIntArray(NULL, NULL, "-dm_plex_box_faces", 33 faces, &dim, NULL); CHKERRQ(ierr); 34 if (!dim) dim = problem->dim; 35 ierr = DMPlexCreateBoxMesh(comm, dim, PETSC_FALSE, faces, NULL, scale, 36 NULL, PETSC_TRUE, dm); CHKERRQ(ierr); 37 38 // Distribute DM in parallel 39 ierr = DMPlexGetPartitioner(*dm, &part); CHKERRQ(ierr); 40 ierr = PetscPartitionerSetFromOptions(part); CHKERRQ(ierr); 41 ierr = DMPlexDistribute(*dm, 0, NULL, &dist_mesh); CHKERRQ(ierr); 42 if (dist_mesh) { 43 ierr = DMDestroy(dm); CHKERRQ(ierr); 44 *dm = dist_mesh; 45 } 46 ierr = DMViewFromOptions(*dm, NULL, "-dm_view"); CHKERRQ(ierr); 47 48 PetscFunctionReturn(0); 49 } 50 51 // Setup DM 52 PetscErrorCode SetUpDM(DM dm, ProblemData *problem, PetscInt degree, 53 SimpleBC bc, Physics phys, void *setup_ctx) { 54 PetscErrorCode ierr; 55 PetscFunctionBeginUser; 56 { 57 // Configure the finite element space and boundary conditions 58 PetscFE fe; 59 PetscInt num_comp_q = 5; 60 ierr = PetscFECreateLagrange(PETSC_COMM_SELF, problem->dim, num_comp_q, 61 PETSC_FALSE, degree, PETSC_DECIDE, 62 &fe); CHKERRQ(ierr); 63 ierr = PetscObjectSetName((PetscObject)fe, "Q"); CHKERRQ(ierr); 64 ierr = DMAddField(dm, NULL,(PetscObject)fe); CHKERRQ(ierr); 65 ierr = DMCreateDS(dm); CHKERRQ(ierr); 66 { 67 /* create FE field for coordinates */ 68 PetscFE fe_coords; 69 PetscInt num_comp_coord; 70 ierr = DMGetCoordinateDim(dm, &num_comp_coord); CHKERRQ(ierr); 71 ierr = PetscFECreateLagrange(PETSC_COMM_SELF, 
problem->dim, num_comp_coord, 72 PETSC_FALSE, 1, 1, &fe_coords); CHKERRQ(ierr); 73 ierr = DMProjectCoordinates(dm, fe_coords); CHKERRQ(ierr); 74 ierr = PetscFEDestroy(&fe_coords); CHKERRQ(ierr); 75 } 76 ierr = problem->bc_func(dm, bc, phys, setup_ctx); 77 ierr = DMPlexSetClosurePermutationTensor(dm, PETSC_DETERMINE, NULL); 78 CHKERRQ(ierr); 79 ierr = PetscFEDestroy(&fe); CHKERRQ(ierr); 80 } 81 { 82 // Empty name for conserved field (because there is only one field) 83 PetscSection section; 84 ierr = DMGetLocalSection(dm, §ion); CHKERRQ(ierr); 85 ierr = PetscSectionSetFieldName(section, 0, ""); CHKERRQ(ierr); 86 ierr = PetscSectionSetComponentName(section, 0, 0, "Density"); 87 CHKERRQ(ierr); 88 ierr = PetscSectionSetComponentName(section, 0, 1, "Momentum X"); 89 CHKERRQ(ierr); 90 ierr = PetscSectionSetComponentName(section, 0, 2, "Momentum Y"); 91 CHKERRQ(ierr); 92 ierr = PetscSectionSetComponentName(section, 0, 3, "Momentum Z"); 93 CHKERRQ(ierr); 94 ierr = PetscSectionSetComponentName(section, 0, 4, "Energy Density"); 95 CHKERRQ(ierr); 96 } 97 PetscFunctionReturn(0); 98 } 99 100 // Refine DM for high-order viz 101 PetscErrorCode VizRefineDM(DM dm, User user, ProblemData *problem, 102 SimpleBC bc, Physics phys, void *setup_ctx) { 103 PetscErrorCode ierr; 104 DM dm_hierarchy[user->app_ctx->viz_refine + 1]; 105 VecType vec_type; 106 PetscFunctionBeginUser; 107 108 ierr = DMPlexSetRefinementUniform(dm, PETSC_TRUE); CHKERRQ(ierr); 109 110 dm_hierarchy[0] = dm; 111 for (PetscInt i = 0, d = user->app_ctx->degree; 112 i < user->app_ctx->viz_refine; i++) { 113 Mat interp_next; 114 ierr = DMRefine(dm_hierarchy[i], MPI_COMM_NULL, &dm_hierarchy[i+1]); 115 CHKERRQ(ierr); 116 ierr = DMClearDS(dm_hierarchy[i+1]); CHKERRQ(ierr); 117 ierr = DMClearFields(dm_hierarchy[i+1]); CHKERRQ(ierr); 118 ierr = DMSetCoarseDM(dm_hierarchy[i+1], dm_hierarchy[i]); CHKERRQ(ierr); 119 d = (d + 1) / 2; 120 if (i + 1 == user->app_ctx->viz_refine) d = 1; 121 ierr = DMGetVecType(dm, &vec_type); 
CHKERRQ(ierr); 122 ierr = DMSetVecType(dm_hierarchy[i+1], vec_type); CHKERRQ(ierr); 123 ierr = SetUpDM(dm_hierarchy[i+1], problem, d, bc, phys, setup_ctx); 124 CHKERRQ(ierr); 125 ierr = DMCreateInterpolation(dm_hierarchy[i], dm_hierarchy[i+1], &interp_next, 126 NULL); CHKERRQ(ierr); 127 if (!i) user->interp_viz = interp_next; 128 else { 129 Mat C; 130 ierr = MatMatMult(interp_next, user->interp_viz, MAT_INITIAL_MATRIX, 131 PETSC_DECIDE, &C); CHKERRQ(ierr); 132 ierr = MatDestroy(&interp_next); CHKERRQ(ierr); 133 ierr = MatDestroy(&user->interp_viz); CHKERRQ(ierr); 134 user->interp_viz = C; 135 } 136 } 137 for (PetscInt i=1; i<user->app_ctx->viz_refine; i++) { 138 ierr = DMDestroy(&dm_hierarchy[i]); CHKERRQ(ierr); 139 } 140 user->dm_viz = dm_hierarchy[user->app_ctx->viz_refine]; 141 142 PetscFunctionReturn(0); 143 } 144