xref: /libCEED/examples/fluids/src/setupts.c (revision e17e35bb87e06e9a29be52e929d971570655a29f)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Time-stepping functions for Navier-Stokes example using PETSc
10 
11 #include <ceed.h>
12 #include <petscdmplex.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/newtonian_state.h"
17 
// Compute mass matrix for explicit scheme
//
// Builds a CEED mass operator from the solution restriction/basis and stored
// quadrature data, applies it to a local vector of all ones (producing the row
// sums of the mass matrix, i.e. the diagonally lumped mass), and stores the
// entrywise *inverse* of that lumped mass in M so RHS_NS can multiply by it
// directly as M^{-1}.
//
// @param[in]  ceed       Ceed context used to create the QFunction and operator
// @param[in]  dm         DM providing the local/global vector layouts
// @param[in]  ceed_data  Holds elem_restr_q, elem_restr_qd_i, basis_q, q_data
// @param[out] M          Global vector receiving the inverse lumped mass
PetscErrorCode ComputeLumpedMassMatrix(Ceed ceed, DM dm, CeedData ceed_data, Vec M) {
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext op_mass_ctx;
  Vec                  Ones_loc;
  CeedInt              num_comp_q, q_data_size;

  PetscFunctionBeginUser;
  // Component counts are taken from the restrictions so the QFunction matches the discretization
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q));
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size));

  // Assemble the mass operator: v = (u, qdata-weighted) over the domain
  PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));
  PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_NONE, ceed_data->q_data));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));

  PetscCall(OperatorApplyContextCreate(NULL, dm, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));

  // Applying the mass operator to a vector of ones yields the lumped (row-sum) mass in M
  PetscCall(DMGetLocalVector(dm, &Ones_loc));
  PetscCall(VecSet(Ones_loc, 1));
  PetscCall(ApplyCeedOperatorLocalToGlobal(Ones_loc, M, op_mass_ctx));

  // Invert diagonally lumped mass vector for RHS function
  PetscCall(VecReciprocal(M));

  // Cleanup
  PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
  PetscCall(DMRestoreLocalVector(dm, &Ones_loc));
  PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
  PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}
52 
53 // Insert Boundary values if it's a new time
54 PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
55   PetscFunctionBeginUser;
56   if (user->time_bc_set != t) {
57     PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
58     user->time_bc_set = t;
59   }
60   PetscFunctionReturn(PETSC_SUCCESS);
61 }
62 
// RHS (Explicit time-stepper) function setup
//   This is the RHS of the ODE, given as u_t = G(t,u)
//   This function takes in a state vector Q and writes into G
//
// @param[in]  ts        TS object (used to query the current step size)
// @param[in]  t         Current solution time
// @param[in]  Q         Global state vector
// @param[out] G         Global vector receiving M^{-1} F(t, Q)
// @param[in]  user_data Pointer to the User context
PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
  User        user = *(User *)user_data;
  Ceed        ceed = user->ceed;
  PetscScalar dt;
  Vec         Q_loc = user->Q_loc;

  PetscFunctionBeginUser;
  // Update time dependent data
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  // Forward the current time and step size to the QFunction context, but only
  // if the physics registered labels for them
  if (user->phys->solution_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_rhs_ctx->op, user->phys->solution_time_label, &t));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_rhs_ctx->op, user->phys->timestep_size_label, &dt));

  // Evaluate the spatial residual: G = F(t, Q)
  PetscCall(ApplyCeedOperatorGlobalToGlobal(Q, G, user->op_rhs_ctx));

  // Inverse of the lumped mass matrix (user->M_inv holds M^{-1} entrywise)
  PetscCall(VecPointwiseMult(G, G, user->M_inv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
85 
86 // Surface forces function setup
87 static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
88   DMLabel            face_label;
89   const PetscScalar *g;
90   PetscInt           dof, dim = 3;
91   MPI_Comm           comm;
92   PetscSection       s;
93 
94   PetscFunctionBeginUser;
95   PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
96   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
97   PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
98   PetscCall(VecGetArrayRead(G_loc, &g));
99   for (PetscInt w = 0; w < num_walls; w++) {
100     const PetscInt wall = walls[w];
101     IS             wall_is;
102     PetscCall(DMGetLocalSection(dm, &s));
103     PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
104     if (wall_is) {  // There exist such points on this process
105       PetscInt        num_points;
106       PetscInt        num_comp = 0;
107       const PetscInt *points;
108       PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
109       PetscCall(ISGetSize(wall_is, &num_points));
110       PetscCall(ISGetIndices(wall_is, &points));
111       for (PetscInt i = 0; i < num_points; i++) {
112         const PetscInt           p = points[i];
113         const StateConservative *r;
114         PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
115         PetscCall(PetscSectionGetDof(s, p, &dof));
116         for (PetscInt node = 0; node < dof / num_comp; node++) {
117           for (PetscInt j = 0; j < 3; j++) {
118             reaction_force[w * dim + j] -= r[node].momentum[j];
119           }
120         }
121       }
122       PetscCall(ISRestoreIndices(wall_is, &points));
123     }
124     PetscCall(ISDestroy(&wall_is));
125   }
126   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
127   //  Restore Vectors
128   PetscCall(VecRestoreArrayRead(G_loc, &g));
129   PetscFunctionReturn(PETSC_SUCCESS);
130 }
131 
// Implicit time-stepper function setup
//
// Computes the implicit residual G(t, Q, Q_dot) for TS: localizes the global
// states (inserting boundary values for the current time), wraps the PETSc
// local vectors as CEED vectors without copying, applies the CEED IFunction
// operator, optionally adds the data-driven SGS model contribution, and sums
// the local residual back into the global vector G.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  Ceed         ceed = user->ceed;
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;

  PetscFunctionBeginUser;
  // Get local vectors
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ifunction, user->phys->solution_time_label, &t));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ifunction, user->phys->timestep_size_label, &dt));

  // Global-to-local (Begin/End pairs interleaved to overlap communication)
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors (mem types recorded for the matching restore)
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorApply, Q, G, 0, 0));
  PetscCall(PetscLogGpuTimeBegin());
  PetscCallCeed(user->ceed, CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE));
  PetscCall(PetscLogGpuTimeEnd());
  PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorApply, Q, G, 0, 0));

  // Restore vectors
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Add the data-driven subgrid-stress model contribution to the local residual
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SgsDDModelApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global (zero first since contributions are summed with ADD_VALUES)
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
185 
186 static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
187   PetscCount ncoo;
188   PetscInt  *rows_petsc, *cols_petsc;
189   CeedInt   *rows_ceed, *cols_ceed;
190 
191   PetscFunctionBeginUser;
192   if (pbdiagonal) {
193     PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonalSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
194   } else {
195     PetscCallCeed(user->ceed, CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
196   }
197   PetscCall(IntArrayC2P(ncoo, &rows_ceed, &rows_petsc));
198   PetscCall(IntArrayC2P(ncoo, &cols_ceed, &cols_petsc));
199   PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows_petsc, cols_petsc));
200   free(rows_petsc);
201   free(cols_petsc);
202   PetscCallCeed(user->ceed, CeedVectorCreate(user->ceed, ncoo, coo_values));
203   PetscFunctionReturn(PETSC_SUCCESS);
204 }
205 
206 static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
207   CeedMemType        mem_type = CEED_MEM_HOST;
208   const PetscScalar *values;
209   MatType            mat_type;
210 
211   PetscFunctionBeginUser;
212   PetscCall(MatGetType(J, &mat_type));
213   if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
214   if (pbdiagonal) {
215     PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
216     PetscCall(PetscLogGpuTimeBegin());
217     PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE));
218     PetscCall(PetscLogGpuTimeEnd());
219     PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
220   } else {
221     PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
222     PetscCall(PetscLogGpuTimeBegin());
223     PetscCallCeed(user->ceed, CeedOperatorLinearAssemble(user->op_ijacobian, coo_values));
224     PetscCall(PetscLogGpuTimeEnd());
225     PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
226   }
227   PetscCallCeed(user->ceed, CeedVectorGetArrayRead(coo_values, mem_type, &values));
228   PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
229   PetscCallCeed(user->ceed, CeedVectorRestoreArrayRead(coo_values, &values));
230   PetscFunctionReturn(PETSC_SUCCESS);
231 }
232 
233 PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
234   User      user = *(User *)user_data;
235   Ceed      ceed = user->ceed;
236   PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
237 
238   PetscFunctionBeginUser;
239   if (user->phys->ijacobian_time_shift_label)
240     PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift));
241   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
242   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
243   PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
244   if (!user->matrices_set_up) {
245     if (J_is_shell) {
246       OperatorApplyContext op_ijacobian_ctx;
247       OperatorApplyContextCreate(user->dm, user->dm, user->ceed, user->op_ijacobian, user->q_ceed, user->g_ceed, user->Q_dot_loc, NULL,
248                                  &op_ijacobian_ctx);
249       PetscCall(MatShellSetContext(J, op_ijacobian_ctx));
250       PetscCall(MatShellSetContextDestroy(J, (PetscErrorCode(*)(void *))OperatorApplyContextDestroy));
251       PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_Ceed));
252       PetscCall(MatShellSetOperation(J, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiag_Ceed));
253       PetscCall(MatSetUp(J));
254     }
255     if (!J_pre_is_shell) {
256       PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
257     }
258     if (J != J_pre && !J_is_shell && !J_is_mffd) {
259       PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
260     }
261     user->matrices_set_up = true;
262   }
263   if (!J_pre_is_shell) {
264     PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
265   }
266   if (user->coo_values_amat) {
267     PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
268   } else if (J_is_mffd) {
269     PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
270     PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
271   }
272   PetscFunctionReturn(PETSC_SUCCESS);
273 }
// Write visualization and checkpoint output for the current solution.
//
// When checkpoint_vtk is enabled, writes a VTU file of the localized state
// (plus a refined-visualization VTU when dm_viz is set). Always writes a
// binary checkpoint containing a format token, the step number, the
// dimensionalized time, and Q, for continuation of simulations.
//
// @param[in] user     User context (app options, DMs, units, comm)
// @param[in] Q        Global solution vector
// @param[in] step_no  Current step number (used in output file names)
// @param[in] time     Current (nondimensional) solution time
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;

  PetscFunctionBeginUser;
  if (user->app_ctx->checkpoint_vtk) {
    // Set up output
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    // Optional second output interpolated onto the refined visualization DM
    if (user->dm_viz) {
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  // File token records the index width so readers can detect 32/64-bit checkpoints
  PetscInt32 token = PetscDefined(USE_64BIT_INDICES) ? FLUIDS_FILE_TOKEN_64 : FLUIDS_FILE_TOKEN_32;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT32));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
336 
337 // CSV Monitor
338 PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
339   User              user = ctx;
340   Vec               G_loc;
341   PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
342   const PetscInt   *walls  = user->app_ctx->wall_forces.walls;
343   PetscViewer       viewer = user->app_ctx->wall_forces.viewer;
344   PetscViewerFormat format = user->app_ctx->wall_forces.viewer_format;
345   PetscScalar      *reaction_force;
346   PetscBool         iascii;
347 
348   PetscFunctionBeginUser;
349   if (!viewer) PetscFunctionReturn(PETSC_SUCCESS);
350   PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
351   PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
352   PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
353   PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
354 
355   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
356 
357   if (iascii) {
358     if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
359       PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
360       user->app_ctx->wall_forces.header_written = PETSC_TRUE;
361     }
362     for (PetscInt w = 0; w < num_wall; w++) {
363       PetscInt wall = walls[w];
364       if (format == PETSC_VIEWER_ASCII_CSV) {
365         PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
366                                          reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
367 
368       } else {
369         PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
370                                          reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
371       }
372     }
373   }
374   PetscCall(PetscFree(reaction_force));
375   PetscFunctionReturn(PETSC_SUCCESS);
376 }
377 
378 // User provided TS Monitor
379 PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
380   User user = ctx;
381 
382   PetscFunctionBeginUser;
383   // Print every 'checkpoint_interval' steps
384   if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
385       (user->app_ctx->cont_steps == step_no && step_no != 0)) {
386     PetscFunctionReturn(PETSC_SUCCESS);
387   }
388 
389   PetscCall(WriteOutput(user, Q, step_no, time));
390   PetscFunctionReturn(PETSC_SUCCESS);
391 }
392 
// TS: Create, setup, and solve
//
// Creates and configures the time stepper (implicit BDF with optional CEED
// IJacobian, or explicit RK5F), attaches output/statistics monitors, restores
// time and step number when continuing from a checkpoint, runs the solve
// inside a logged "Fluids Solve" stage (with optional preloading), writes a
// final checkpoint, and returns the final solve time in *f_time.
//
// @param[in]  dm       DM for the solution
// @param[in]  user     User context (operators, units, comm)
// @param[in]  app_ctx  Application options
// @param[in]  phys     Physics options (implicit vs explicit, labels)
// @param[in,out] Q     Solution vector (initial condition in, final state out)
// @param[out] f_time   Final solve time reported by TS
// @param[out] ts       The created TS (caller destroys)
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;

  PetscFunctionBeginUser;
  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  PetscCall(TSSetApplicationContext(*ts, user));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      // Optionally use a different matrix type for the Jacobian (Amat) than
      // for the preconditioning matrix (Pmat)
      if (app_ctx->amat_type) {
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        // TS holds references; drop ours
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs_ctx, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Defaults below (max time, step size, adapt limits) are in scaled units and
  // can be overridden via TSSetFromOptions
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;   // require all BCs be updated
  if (app_ctx->cont_steps) {  // continue from previous timestep data
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  // Attach monitors as requested by the application options
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_TurbulenceStatistics, user, NULL));
    // Seed the statistics collection with the (scaled) restart time
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    PetscCallCeed(user->ceed,
                  CeedOperatorSetContextDouble(user->spanstats.op_stats_collect_ctx->op, user->spanstats.previous_time_label, &previous_time));
  }
  if (app_ctx->diff_filter_monitor) PetscCall(TSMonitorSet(*ts, TSMonitor_DifferentialFilter, user, NULL));

  if (app_ctx->sgs_train_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_SGS_DD_Training, user, NULL));
    PetscCall(TSSetPostStep(*ts, TSPostStep_SGS_DD_Training));
  }
  // Solve
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  PetscCall(PetscLogDefaultBegin());  // So we can use PetscLogStageGetPerfInfo without -log_view
  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  // Reset time/step each pass so a preload pass does not advance the real solve
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // LCOV_EXCL_START
    // Preload pass: take one cheap step on a copy of Q (loosened SNES rtol) to
    // warm up caches/JIT before the timed solve
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, Q_preload));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    PetscCall(PetscBarrier((PetscObject)*ts));
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    // checkpoint_interval == -1 means "final checkpoint only"
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    PetscLogStage      stage_id;
    PetscEventPerfInfo stage_perf;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogStageGetPerfInfo(stage_id, &stage_perf));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_perf.time));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
518