xref: /libCEED/examples/fluids/src/setupts.c (revision ed9ed3dea34fef5853dd3f23fdf30a7807a29b2c)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Time-stepping functions for Navier-Stokes example using PETSc
10 
11 #include <ceed.h>
12 #include <petscdmplex.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/newtonian_state.h"
17 
// @brief Create KSP to solve the inverse mass operator for explicit time stepping schemes
//
// Builds a libCEED mass operator (active input "u", passive "qdata", active output "v"),
// wraps it in a MatShell, and creates user->mass_ksp to apply its inverse. Defaults to
// row-sum (lumped) Jacobi with KSPPREONLY so the "solve" is a diagonal scaling; runtime
// options with the "mass_" prefix can override this.
//
// @param[in,out] user       User context; user->mass_ksp is created here
// @param[in]     ceed_data  Holds element restrictions, basis, and quadrature data
PetscErrorCode CreateKSPMassOperator(User user, CeedData ceed_data) {
  Ceed                 ceed = user->ceed;
  DM                   dm   = user->dm;
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext mass_matop_ctx;
  CeedInt              num_comp_q, q_data_size;

  PetscFunctionBeginUser;
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q));
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size));

  // Assemble the CEED mass operator from the solution restriction/basis and qdata
  PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));
  PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_NONE, ceed_data->q_data));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));

  {  // -- Setup KSP for mass operator
    Mat      mat_mass = NULL;
    Vec      Zeros_loc;
    MPI_Comm comm = PetscObjectComm((PetscObject)dm);

    // Zeroed local vector handed to the apply context (presumably the boundary-value
    // vector, so the mass apply adds no boundary contribution — TODO confirm)
    PetscCall(DMCreateLocalVector(dm, &Zeros_loc));
    PetscCall(VecZeroEntries(Zeros_loc));
    PetscCall(OperatorApplyContextCreate(dm, dm, ceed, op_mass, NULL, NULL, Zeros_loc, NULL, &mass_matop_ctx));
    PetscCall(CreateMatShell_Ceed(mass_matop_ctx, &mat_mass));

    PetscCall(KSPCreate(comm, &user->mass_ksp));
    PetscCall(KSPSetOptionsPrefix(user->mass_ksp, "mass_"));
    {  // lumped by default
      PC pc;
      PetscCall(KSPGetPC(user->mass_ksp, &pc));
      PetscCall(PCSetType(pc, PCJACOBI));
      PetscCall(PCJacobiSetType(pc, PC_JACOBI_ROWSUM));  // row-sum lumping of the mass matrix
      PetscCall(KSPSetType(user->mass_ksp, KSPPREONLY));
    }
    PetscCall(KSPSetOperators(user->mass_ksp, mat_mass, mat_mass));
    PetscCall(KSPSetFromOptions(user->mass_ksp));  // allow -mass_* overrides of the defaults above
    // KSP retains its own references to the operator; drop the local ones
    PetscCall(VecDestroy(&Zeros_loc));
    PetscCall(MatDestroy(&mat_mass));
  }

  // Cleanup of this function's CEED references (CEED objects are reference counted;
  // the shell matrix's apply context presumably keeps what it needs)
  PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
  PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}
67 
68 // Insert Boundary values if it's a new time
69 PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
70   PetscFunctionBeginUser;
71   if (user->time_bc_set != t) {
72     PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
73     user->time_bc_set = t;
74   }
75   PetscFunctionReturn(PETSC_SUCCESS);
76 }
77 
// RHS (Explicit time-stepper) function setup
//   This is the RHS of the ODE, given as u_t = G(t,u)
//   This function takes in a state vector Q and writes into G
PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
  User        user = *(User *)user_data;
  Ceed        ceed = user->ceed;
  PetscScalar dt;
  Vec         Q_loc = user->Q_loc;

  PetscFunctionBeginUser;
  // Update time dependent data: boundary values, plus the solution time and step
  // size in the RHS operator's QFunction context (only if the labels exist)
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_rhs_ctx->op, user->phys->solution_time_label, &t));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_rhs_ctx->op, user->phys->timestep_size_label, &dt));

  // Evaluate the spatial operator: G <- op_rhs(Q)
  PetscCall(ApplyCeedOperatorGlobalToGlobal(Q, G, user->op_rhs_ctx));

  // Inverse of the lumped mass matrix
  // In-place solve G <- M^{-1} G using the KSP built in CreateKSPMassOperator()
  PetscCall(KSPSolve(user->mass_ksp, G, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}
100 
101 // Surface forces function setup
102 static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
103   DMLabel            face_label;
104   const PetscScalar *g;
105   PetscInt           dof, dim = 3;
106   MPI_Comm           comm;
107   PetscSection       s;
108 
109   PetscFunctionBeginUser;
110   PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
111   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
112   PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
113   PetscCall(VecGetArrayRead(G_loc, &g));
114   for (PetscInt w = 0; w < num_walls; w++) {
115     const PetscInt wall = walls[w];
116     IS             wall_is;
117     PetscCall(DMGetLocalSection(dm, &s));
118     PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
119     if (wall_is) {  // There exist such points on this process
120       PetscInt        num_points;
121       PetscInt        num_comp = 0;
122       const PetscInt *points;
123       PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
124       PetscCall(ISGetSize(wall_is, &num_points));
125       PetscCall(ISGetIndices(wall_is, &points));
126       for (PetscInt i = 0; i < num_points; i++) {
127         const PetscInt           p = points[i];
128         const StateConservative *r;
129         PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
130         PetscCall(PetscSectionGetDof(s, p, &dof));
131         for (PetscInt node = 0; node < dof / num_comp; node++) {
132           for (PetscInt j = 0; j < 3; j++) {
133             reaction_force[w * dim + j] -= r[node].momentum[j];
134           }
135         }
136       }
137       PetscCall(ISRestoreIndices(wall_is, &points));
138     }
139     PetscCall(ISDestroy(&wall_is));
140   }
141   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
142   //  Restore Vectors
143   PetscCall(VecRestoreArrayRead(G_loc, &g));
144   PetscFunctionReturn(PETSC_SUCCESS);
145 }
146 
// Implicit time-stepper function setup
//
// Evaluates the implicit residual G(t, Q, Q_dot) for TS: updates time-dependent data,
// scatters Q and Q_dot to local vectors, applies the CEED IFunction operator, adds the
// optional data-driven SGS contribution, and gathers the local residual back to G.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  Ceed         ceed = user->ceed;
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;

  PetscFunctionBeginUser;
  // Get local vectors ("ResidualLocal" is reused by TSMonitor_WallForce for surface forces)
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data (BCs, and time/step-size in the QFunction context if labeled)
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ifunction, user->phys->solution_time_label, &t));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ifunction, user->phys->timestep_size_label, &dt));

  // Global-to-local (begins overlapped before the matching ends)
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors (zero-copy pointer handoff; mem types recorded for restore)
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorApply, Q, G, 0, 0));
  PetscCall(PetscLogGpuTimeBegin());
  PetscCallCeed(user->ceed, CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE));
  PetscCall(PetscLogGpuTimeEnd());
  PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorApply, Q, G, 0, 0));

  // Restore vectors (give the arrays back to PETSc before further Vec operations)
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Optional data-driven subgrid-stress model contribution to the local residual
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SgsDDApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global (G zeroed first since contributions are ADDed)
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
200 
// @brief Symbolically assemble the IJacobian sparsity and preallocate J in COO form
//
// Queries libCEED for the COO (row, col) pattern of the IJacobian — either the full
// matrix or only its point-block diagonal — preallocates J with it, and creates the
// CeedVector that FormSetValues() will fill with the numeric entries.
//
// @param[in]  user        User context providing ceed and op_ijacobian
// @param[in]  pbdiagonal  If true, assemble only the point-block diagonal pattern
// @param[out] J           Matrix to preallocate (local COO indices)
// @param[out] coo_values  New CeedVector of length ncoo; caller is responsible for destroying it
static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
  PetscCount ncoo;
  PetscInt  *rows_petsc, *cols_petsc;
  CeedInt   *rows_ceed, *cols_ceed;

  PetscFunctionBeginUser;
  if (pbdiagonal) {
    PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonalSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
  } else {
    PetscCallCeed(user->ceed, CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
  }
  // Convert CeedInt index arrays to PetscInt; the results are malloc-allocated, hence free() below
  PetscCall(IntArrayC2P(ncoo, &rows_ceed, &rows_petsc));
  PetscCall(IntArrayC2P(ncoo, &cols_ceed, &cols_petsc));
  PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows_petsc, cols_petsc));
  free(rows_petsc);
  free(cols_petsc);
  PetscCallCeed(user->ceed, CeedVectorCreate(user->ceed, ncoo, coo_values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
220 
// @brief Numerically assemble the IJacobian values and insert them into J via COO
//
// Fills the preallocated COO pattern (see FormPreallocation) with the current operator
// values. When J is a GPU matrix type (kokkos/cusparse), assembly stays in device memory.
//
// @param[in]  user        User context providing ceed and op_ijacobian
// @param[in]  pbdiagonal  If true, assemble only the point-block diagonal values
// @param[out] J           Matrix receiving the values (must be COO-preallocated)
// @param[in]  coo_values  CeedVector sized for the COO entries, created in FormPreallocation
static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
  CeedMemType        mem_type = CEED_MEM_HOST;
  const PetscScalar *values;
  MatType            mat_type;

  PetscFunctionBeginUser;
  // Keep the assembled values on the device for GPU matrix types to avoid a host round-trip
  PetscCall(MatGetType(J, &mat_type));
  if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
  if (pbdiagonal) {
    PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
    PetscCall(PetscLogGpuTimeBegin());
    PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE));
    PetscCall(PetscLogGpuTimeEnd());
    PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
  } else {
    PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
    PetscCall(PetscLogGpuTimeBegin());
    PetscCallCeed(user->ceed, CeedOperatorLinearAssemble(user->op_ijacobian, coo_values));
    PetscCall(PetscLogGpuTimeEnd());
    PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
  }
  PetscCallCeed(user->ceed, CeedVectorGetArrayRead(coo_values, mem_type, &values));
  PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
  PetscCallCeed(user->ceed, CeedVectorRestoreArrayRead(coo_values, &values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
247 
248 PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
249   User      user = *(User *)user_data;
250   Ceed      ceed = user->ceed;
251   PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
252 
253   PetscFunctionBeginUser;
254   if (user->phys->ijacobian_time_shift_label)
255     PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift));
256   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
257   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
258   PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
259   if (!user->matrices_set_up) {
260     if (J_is_shell) {
261       OperatorApplyContext op_ijacobian_ctx;
262       OperatorApplyContextCreate(user->dm, user->dm, user->ceed, user->op_ijacobian, user->q_ceed, user->g_ceed, user->Q_dot_loc, NULL,
263                                  &op_ijacobian_ctx);
264       PetscCall(CreateMatShell_Ceed(op_ijacobian_ctx, &J));
265       PetscCall(MatSetUp(J));
266     }
267     if (!J_pre_is_shell) {
268       PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
269     }
270     if (J != J_pre && !J_is_shell && !J_is_mffd) {
271       PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
272     }
273     user->matrices_set_up = true;
274   }
275   if (!J_pre_is_shell) {
276     PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
277   }
278   if (user->coo_values_amat) {
279     PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
280   } else if (J_is_mffd) {
281     PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
282     PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
283   }
284   PetscFunctionReturn(PETSC_SUCCESS);
285 }
286 
// @brief Write checkpoint/visualization output for the current solution
//
// Optionally writes VTK output of the state (and of a refined visualization DM when
// user->dm_viz is set), then always writes a binary checkpoint containing a file-format
// token, the step number, the dimensionalized time, and the state vector Q.
//
// @param[in] user     User context (app options, DMs, units, comm)
// @param[in] Q        Global state vector to output
// @param[in] step_no  Current step number, used in file names and checkpoint header
// @param[in] time     Current (nondimensional) time; dimensionalized before writing
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;

  PetscFunctionBeginUser;
  if (user->app_ctx->checkpoint_vtk) {
    // Set up output (VTK viewers write the local vector; name becomes the field label)
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    // Additional output interpolated onto the refined visualization mesh, if configured
    if (user->dm_viz) {
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  // File-format token records the index width so readers can detect 32/64-bit checkpoints
  PetscInt32 token = PetscDefined(USE_64BIT_INDICES) ? FLUIDS_FILE_TOKEN_64 : FLUIDS_FILE_TOKEN_32;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT32));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
348 
// CSV Monitor
//
// TS monitor that computes wall reaction forces from the cached local residual
// ("ResidualLocal", filled by IFunction_NS) and prints them — as CSV rows (with a
// one-time header) or as human-readable lines, depending on the viewer format.
// No-op unless a wall-forces viewer was configured.
PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
  User              user = ctx;
  Vec               G_loc;
  PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
  const PetscInt   *walls  = user->app_ctx->wall_forces.walls;
  PetscViewer       viewer = user->app_ctx->wall_forces.viewer;
  PetscViewerFormat format = user->app_ctx->wall_forces.viewer_format;
  PetscScalar      *reaction_force;
  PetscBool         iascii;

  PetscFunctionBeginUser;
  if (!viewer) PetscFunctionReturn(PETSC_SUCCESS);
  // Compute per-wall forces from the residual; one dim-sized slot per wall
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
  PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));

  if (iascii) {
    // CSV header is written only once per run
    if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
      user->app_ctx->wall_forces.header_written = PETSC_TRUE;
    }
    for (PetscInt w = 0; w < num_wall; w++) {
      PetscInt wall = walls[w];
      if (format == PETSC_VIEWER_ASCII_CSV) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));

      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
      }
    }
  }
  PetscCall(PetscFree(reaction_force));
  PetscFunctionReturn(PETSC_SUCCESS);
}
389 
390 // User provided TS Monitor
391 PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
392   User user = ctx;
393 
394   PetscFunctionBeginUser;
395   // Print every 'checkpoint_interval' steps
396   if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
397       (user->app_ctx->cont_steps == step_no && step_no != 0)) {
398     PetscFunctionReturn(PETSC_SUCCESS);
399   }
400 
401   PetscCall(WriteOutput(user, Q, step_no, time));
402   PetscFunctionReturn(PETSC_SUCCESS);
403 }
404 
// TS: Create, setup, and solve
//
// Creates the TS (BDF with IFunction/IJacobian when implicit, RK5F otherwise), wires in
// monitors and continuation state, optionally preloads one step for accurate timing,
// runs the solve, writes final output, and reports the solve time.
//
// @param[in]  dm       Solution DM attached to the TS
// @param[in]  user     User context (operators, units, app options)
// @param[in]  app_ctx  Application options (continuation, monitors, test mode)
// @param[in]  phys     Physics context (implicit flag, context labels)
// @param[in,out] Q     Global state; initial condition in, final solution out
// @param[out] f_time   Final solve time reported by TS
// @param[out] ts       The created TS (caller destroys)
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;

  PetscFunctionBeginUser;
  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  PetscCall(TSSetApplicationContext(*ts, user));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      if (app_ctx->amat_type) {
        // Use a different matrix type for the A (operator) matrix than the P matrix;
        // TS keeps references, so the local handles are destroyed immediately
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs_ctx, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Defaults (overridable below by TSSetFromOptions); times are scaled by the units system
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;   // require all BCs be updated
  if (app_ctx->cont_steps) {  // continue from previous timestep data
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  // Monitors: checkpointing, wall forces, turbulence statistics, filtering, SGS training
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_TurbulenceStatistics, user, NULL));
    // Seed the stats-collection operator with the (scaled) continuation time
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    PetscCallCeed(user->ceed,
                  CeedOperatorSetContextDouble(user->spanstats.op_stats_collect_ctx->op, user->spanstats.previous_time_label, &previous_time));
  }
  if (app_ctx->diff_filter_monitor) PetscCall(TSMonitorSet(*ts, TSMonitor_DifferentialFilter, user, NULL));

  if (app_ctx->sgs_train_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_SGS_DD_Training, user, NULL));
    PetscCall(TSSetPostStep(*ts, TSPostStep_SGS_DD_Training));
  }
  // Solve
  // Remember starting time/step so preloading can rewind before the real solve
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  PetscCall(PetscLogDefaultBegin());  // So we can use PetscLogStageGetPerfInfo without -log_view
  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // LCOV_EXCL_START
    // Preload pass: take one cheap step on a copy of Q (loose SNES rtol) to warm up
    // caches/JIT, then restore the original tolerance
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, Q_preload));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    PetscCall(PetscBarrier((PetscObject)*ts));  // synchronize ranks so the timed stage is clean
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    // Final checkpoint (interval -1 means "final only"), then report stage timing
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    PetscLogStage      stage_id;
    PetscEventPerfInfo stage_perf;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogStageGetPerfInfo(stage_id, &stage_perf));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_perf.time));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
530