// Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
// All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
//
// SPDX-License-Identifier: BSD-2-Clause
//
// This file is part of CEED: http://github.com/ceed

/// @file
/// Time-stepping functions for Navier-Stokes example using PETSc

#include <ceed.h>
#include <petscdmplex.h>
#include <petscts.h>

#include "../navierstokes.h"
#include "../qfunctions/newtonian_state.h"

// Compute mass matrix for explicit scheme.
//
// Assembles a row-lumped mass "matrix" (stored as a Vec) by applying the CEED
// mass operator to a local vector of ones, then inverts it entrywise.
// NOTE: on return M holds the *inverse* of the lumped mass (see VecReciprocal
// below), so RHS_NS() can apply M^{-1} with a single pointwise multiply.
PetscErrorCode ComputeLumpedMassMatrix(Ceed ceed, DM dm, CeedData ceed_data, Vec M) {
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext op_mass_ctx;
  Vec                  Ones_loc;
  CeedInt              num_comp_q, q_data_size;
  PetscFunctionBeginUser;

  // CEED Restriction: component counts taken from the existing solution and
  // qdata restrictions so the mass QFunction matches the discretization
  CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q);
  CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size);

  // CEED QFunction
  PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));

  // CEED Operator
  CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass);
  CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_COLLOCATED, ceed_data->q_data);
  CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE);

  PetscCall(OperatorApplyContextCreate(NULL, dm, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));

  // Applying the mass operator to the all-ones vector yields the row sums,
  // i.e. the lumped mass entries, accumulated into the global vector M
  PetscCall(DMGetLocalVector(dm, &Ones_loc));
  PetscCall(VecSet(Ones_loc, 1));
  PetscCall(ApplyCeedOperatorLocalToGlobal(Ones_loc, M, op_mass_ctx));

  // Invert diagonally lumped mass vector for RHS function
  PetscCall(VecReciprocal(M));

  // Cleanup
  PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
  PetscCall(DMRestoreLocalVector(dm, &Ones_loc));
  CeedQFunctionDestroy(&qf_mass);
  CeedOperatorDestroy(&op_mass);

  PetscFunctionReturn(0);
}

// Insert Boundary values if it's a new time.
//
// Caches the last time BCs were set in user->time_bc_set so repeated residual
// evaluations at the same time (e.g. within a nonlinear solve) skip the insert.
PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
  PetscFunctionBeginUser;
  if (user->time_bc_set != t) {
    PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
    user->time_bc_set = t;
  }
  PetscFunctionReturn(0);
}

// @brief Update the context label value to new value if necessary.
// @note This only supports labels with scalar label values (ie. not arrays)
//
// Reads the current value registered under `label` in the operator context and
// only writes `update_value` back if it differs, avoiding redundant context
// updates (which may invalidate operator caches — TODO confirm).
PetscErrorCode UpdateContextLabel(MPI_Comm comm, PetscScalar update_value, CeedOperator op, CeedContextFieldLabel label) {
  PetscScalar label_value;

  PetscFunctionBeginUser;
  PetscCheck(label, comm, PETSC_ERR_ARG_BADPTR, "Label should be non-NULL");

  {
    size_t             num_elements;
    const PetscScalar *label_values;
    CeedOperatorGetContextDoubleRead(op, label, &num_elements, &label_values);
    PetscCheck(num_elements == 1, comm, PETSC_ERR_SUP, "%s does not support labels with more than 1 value. Label has %zu values", __func__,
               num_elements);
    label_value = *label_values;
    CeedOperatorRestoreContextDoubleRead(op, label, &label_values);
  }

  if (label_value != update_value) {
    CeedOperatorSetContextDouble(op, label, &update_value);
  }
  PetscFunctionReturn(0);
}

// RHS (Explicit time-stepper) function setup
// This is the RHS of the ODE, given as u_t = G(t,u)
// This function takes in a state vector Q and writes into G
PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, G_loc;
  PetscMemType q_mem_type, g_mem_type;
  PetscFunctionBeginUser;

  // Get local vector
  PetscCall(DMGetLocalVector(user->dm, &G_loc));

  // Update time dependent data: boundary values and (if the physics registered
  // them) the solution-time and timestep-size labels in the CEED context
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(comm, t, user->op_rhs, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(comm, dt, user->op_rhs, user->phys->timestep_size_label));

  // Global-to-local
  PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

  // Place PETSc vectors in CEED vectors (zero-copy; mem types recorded so the
  // matching C2P calls below restore the same backing memory)
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  CeedOperatorApply(user->op_rhs, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE);

  // Restore vectors
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Local-to-Global: G is zeroed first since ADD_VALUES accumulates
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Inverse of the lumped mass matrix (M is Minv)
  PetscCall(VecPointwiseMult(G, G, user->M));

  // Restore vectors
  PetscCall(DMRestoreLocalVector(user->dm, &G_loc));

  PetscFunctionReturn(0);
}

// Surface forces function setup.
//
// Sums the (negated) momentum components of the local residual over all points
// on each requested wall face set, then MPI_Allreduces so every rank holds the
// total reaction force.
// @param[out] reaction_force  caller-allocated array of num_walls * 3 entries,
//                             laid out [wall][component]
static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
  DMLabel            face_label;
  const PetscScalar *g;
  PetscInt           dof, dim = 3;
  MPI_Comm           comm;
  PetscSection       s;

  PetscFunctionBeginUser;
  PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
  PetscCall(VecGetArrayRead(G_loc, &g));
  for (PetscInt w = 0; w < num_walls; w++) {
    const PetscInt wall = walls[w];
    IS             wall_is;
    PetscCall(DMGetLocalSection(dm, &s));
    PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
    if (wall_is) {  // There exist such points on this process
      PetscInt        num_points;
      PetscInt        num_comp = 0;
      const PetscInt *points;
      PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
      PetscCall(ISGetSize(wall_is, &num_points));
      PetscCall(ISGetIndices(wall_is, &points));
      for (PetscInt i = 0; i < num_points; i++) {
        const PetscInt           p = points[i];
        const StateConservative *r;
        PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
        PetscCall(PetscSectionGetDof(s, p, &dof));
        // dof / num_comp nodes at this point; the residual momentum is negated
        // because the reaction force opposes the flow's force on the wall
        for (PetscInt node = 0; node < dof / num_comp; node++) {
          for (PetscInt j = 0; j < 3; j++) {
            reaction_force[w * dim + j] -= r[node].momentum[j];
          }
        }
      }
      PetscCall(ISRestoreIndices(wall_is, &points));
    }
    PetscCall(ISDestroy(&wall_is));
  }
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
  // Restore Vectors
  PetscCall(VecRestoreArrayRead(G_loc, &g));

  PetscFunctionReturn(0);
}

// Implicit time-stepper function setup.
//
// Evaluates the implicit residual G(t, Q, Q_dot) for TS. The local residual is
// kept in the named vector "ResidualLocal" so TSMonitor_WallForce can reuse it
// after the solve without recomputation.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;
  PetscFunctionBeginUser;

  // Get local vectors
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(comm, t, user->op_ifunction, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(comm, dt, user->op_ifunction, user->phys->timestep_size_label));

  // Global-to-local (begin both scatters before ending either to overlap
  // communication)
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE);

  // Restore vectors
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Data-driven subgrid-stress model contributes additional residual terms
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SGS_DD_ModelApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscFunctionReturn(0);
}

// Set up the COO sparsity pattern of J and create the matching CEED values
// vector.
//
// If pbdiagonal, the pattern is the 5x5 point-block diagonal (5 components per
// node); otherwise the full pattern is obtained from symbolic CEED assembly.
// NOTE(review): malloc results are not checked before use here — confirm
// whether an OOM check is wanted, consistent with PetscMalloc usage elsewhere.
static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
  PetscCount ncoo;
  PetscInt  *rows, *cols;

  PetscFunctionBeginUser;
  if (pbdiagonal) {
    CeedSize l_size;
    CeedOperatorGetActiveVectorLengths(user->op_ijacobian, &l_size, NULL);
    // l_size/5 nodes, each contributing a dense 5x5 block: 25 * l_size/5 entries
    ncoo = l_size * 5;
    rows = malloc(ncoo * sizeof(rows[0]));
    cols = malloc(ncoo * sizeof(cols[0]));
    for (PetscCount n = 0; n < l_size / 5; n++) {
      for (PetscInt i = 0; i < 5; i++) {
        for (PetscInt j = 0; j < 5; j++) {
          rows[(n * 5 + i) * 5 + j] = n * 5 + i;
          cols[(n * 5 + i) * 5 + j] = n * 5 + j;
        }
      }
    }
  } else {
    PetscCall(CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows, &cols));
  }
  PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows, cols));
  free(rows);
  free(cols);
  CeedVectorCreate(user->ceed, ncoo, coo_values);
  PetscFunctionReturn(0);
}

// Assemble the CEED Jacobian values into J via COO insertion.
//
// Uses device memory for the value transfer when the matrix type indicates a
// GPU backend (kokkos/cusparse), avoiding a host round-trip.
static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
  CeedMemType        mem_type = CEED_MEM_HOST;
  const PetscScalar *values;
  MatType            mat_type;

  PetscFunctionBeginUser;
  PetscCall(MatGetType(J, &mat_type));
  if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
  if (user->app_ctx->pmat_pbdiagonal) {
    CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE);
  } else {
    CeedOperatorLinearAssemble(user->op_ijacobian, coo_values);
  }
  CeedVectorGetArrayRead(coo_values, mem_type, &values);
  PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
  CeedVectorRestoreArrayRead(coo_values, &values);
  PetscFunctionReturn(0);
}

// Form the IJacobian for TS.
//
// Supports three configurations of (J, J_pre): matrix-free shell, MFFD, and
// assembled AIJ-type matrices. Preallocation/shell setup happens once (guarded
// by user->matrices_set_up); values are (re)assembled every call since `shift`
// changes between steps/stages.
PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
  User      user = *(User *)user_data;
  PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
  PetscFunctionBeginUser;
  if (user->phys->ijacobian_time_shift_label) CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift);
  PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
  PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
  PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
  if (!user->matrices_set_up) {
    if (J_is_shell) {
      // Matrix-free: route MatMult/MatGetDiagonal through the CEED operator
      OperatorApplyContext op_ijacobian_ctx;
      OperatorApplyContextCreate(user->dm, user->dm, user->ceed, user->op_ijacobian, user->q_ceed, user->g_ceed, user->Q_dot_loc, NULL,
                                 &op_ijacobian_ctx);
      PetscCall(MatShellSetContext(J, op_ijacobian_ctx));
      PetscCall(MatShellSetContextDestroy(J, (PetscErrorCode(*)(void *))OperatorApplyContextDestroy));
      PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_Ceed));
      PetscCall(MatShellSetOperation(J, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiag_Ceed));
      PetscCall(MatSetUp(J));
    }
    if (!J_pre_is_shell) {
      PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
    }
    // Separate Amat only needs its own preallocation when it is a distinct
    // assembled matrix (not shell or MFFD)
    if (J != J_pre && !J_is_shell && !J_is_mffd) {
      PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
    }
    user->matrices_set_up = true;
  }
  if (!J_pre_is_shell) {
    PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
  }
  if (user->coo_values_amat) {
    PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
  } else if (J_is_mffd) {
    // MFFD matrices still need assembly bracketing each Jacobian evaluation
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(0);
}

// Write checkpoint/visualization output for the current solution.
//
// When checkpoint_vtk is enabled, writes a VTU of the state (and a refined
// visualization DM if configured). Always writes a binary continuation file
// containing a file token, the step number, the (redimensionalized) time, and
// the solution vector.
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;
  PetscFunctionBeginUser;

  if (user->app_ctx->checkpoint_vtk) {
    // Set up output
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    if (user->dm_viz) {
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      // Interpolate the solution onto the refined visualization mesh
      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  PetscInt token = FLUIDS_FILE_TOKEN;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(0);
}

// CSV Monitor: report per-wall reaction forces at each step.
//
// Reuses the "ResidualLocal" vector populated by IFunction_NS. Output goes to
// the wall_forces viewer; with PETSC_VIEWER_ASCII_CSV format a header row is
// written once, otherwise a human-readable line per wall.
PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
  User              user     = ctx;
  Vec               G_loc;
  PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
  const PetscInt   *walls    = user->app_ctx->wall_forces.walls;
  PetscViewer       viewer   = user->app_ctx->wall_forces.viewer;
  PetscViewerFormat format   = user->app_ctx->wall_forces.viewer_format;
  PetscScalar      *reaction_force;
  PetscBool         iascii;

  PetscFunctionBeginUser;
  if (!viewer) PetscFunctionReturn(0);
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
  PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));

  if (iascii) {
    if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
      user->app_ctx->wall_forces.header_written = PETSC_TRUE;
    }
    for (PetscInt w = 0; w < num_wall; w++) {
      PetscInt wall = walls[w];
      if (format == PETSC_VIEWER_ASCII_CSV) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));

      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
      }
    }
  }
  PetscCall(PetscFree(reaction_force));
  PetscFunctionReturn(0);
}

// User provided TS Monitor: checkpoint at a fixed step interval.
PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
  User user = ctx;
  PetscFunctionBeginUser;

  // Print every 'checkpoint_interval' steps; skip the step a continuation
  // resumed from (it was already written by the previous run)
  if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
      (user->app_ctx->cont_steps == step_no && step_no != 0)) {
    PetscFunctionReturn(0);
  }

  PetscCall(WriteOutput(user, Q, step_no, time));

  PetscFunctionReturn(0);
}

// TS: Create, setup, and solve.
//
// Configures the time stepper (BDF for implicit physics with IFunction and
// optional IJacobian, RK5F otherwise), handles continuation from a checkpoint,
// registers monitors, runs the (optionally preloaded) solve, and reports the
// final time in *f_time.
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;
  PetscFunctionBeginUser;

  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      if (app_ctx->amat_type) {
        // Distinct Amat type requested: create Pmat with the DM's default
        // type, then switch the DM type to create Amat
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        // TS holds references; drop ours
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Defaults (overridable via TSSetFromOptions below); times are scaled by the
  // nondimensionalization unit user->units->second
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  if (app_ctx->test_type != TESTTYPE_NONE) {
    PetscCall(TSSetMaxSteps(*ts, 10));
  }
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;    // require all BCs be updated
  if (!app_ctx->cont_steps) {  // print initial condition
    if (app_ctx->test_type == TESTTYPE_NONE) {
      PetscCall(TSMonitor_NS(*ts, 0, 0., *Q, user));
    }
  } else {  // continue from time of last output
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_Statistics, user, NULL));
    // Seed the statistics collection with the continuation time
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    CeedOperatorSetContextDouble(user->spanstats.op_stats_collect_ctx->op, user->spanstats.previous_time_label, &previous_time);
  }

  // Solve
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  // Optional preload pass: take one cheap step (loosened SNES rtol) on a copy
  // of the state to warm caches/JIT, then reset time/step for the real solve
  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // LCOV_EXCL_START
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, *Q));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    // Barrier so solve timing is not skewed by rank startup imbalance
    PetscCall(PetscBarrier((PetscObject)*ts));
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    // NOTE(review): declared PetscLogEvent but used as a stage id from
    // PetscLogStageGetId — PetscLogStage appears to be the intended type;
    // confirm (the two are typically compatible integer typedefs)
    PetscLogEvent stage_id;
    PetscStageLog stage_log;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogGetStageLog(&stage_log));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_log->stageInfo[stage_id].perfInfo.time));
  }
  PetscFunctionReturn(0);
}