xref: /honee/src/setupts.c (revision f3fcf8f4d655b5823e7b7ecc2693c84c75480c9a)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Time-stepping functions for Navier-Stokes example using PETSc
10 
11 #include <ceed.h>
12 #include <petscdmplex.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/newtonian_state.h"
17 
18 // Compute mass matrix for explicit scheme
19 PetscErrorCode ComputeLumpedMassMatrix(Ceed ceed, DM dm, CeedData ceed_data, Vec M) {
20   CeedQFunction        qf_mass;
21   CeedOperator         op_mass;
22   OperatorApplyContext op_mass_ctx;
23   Vec                  Ones_loc;
24   CeedInt              num_comp_q, q_data_size;
25   PetscFunctionBeginUser;
26 
27   // CEED Restriction
28   PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q));
29   PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size));
30 
31   // CEED QFunction
32   PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));
33 
34   // CEED Operator
35   PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
36   PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));
37   PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_COLLOCATED, ceed_data->q_data));
38   PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));
39 
40   PetscCall(OperatorApplyContextCreate(NULL, dm, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));
41 
42   PetscCall(DMGetLocalVector(dm, &Ones_loc));
43   PetscCall(VecSet(Ones_loc, 1));
44   PetscCall(ApplyCeedOperatorLocalToGlobal(Ones_loc, M, op_mass_ctx));
45 
46   // Invert diagonally lumped mass vector for RHS function
47   PetscCall(VecReciprocal(M));
48 
49   // Cleanup
50   PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
51   PetscCall(DMRestoreLocalVector(dm, &Ones_loc));
52   PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
53   PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
54 
55   PetscFunctionReturn(PETSC_SUCCESS);
56 }
57 
58 // Insert Boundary values if it's a new time
59 PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
60   PetscFunctionBeginUser;
61   if (user->time_bc_set != t) {
62     PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
63     user->time_bc_set = t;
64   }
65   PetscFunctionReturn(PETSC_SUCCESS);
66 }
67 
68 // @brief Update the context label value to new value if necessary.
69 // @note This only supports labels with scalar label values (ie. not arrays)
70 PetscErrorCode UpdateContextLabel(Ceed ceed, MPI_Comm comm, PetscScalar update_value, CeedOperator op, CeedContextFieldLabel label) {
71   PetscScalar label_value;
72 
73   PetscFunctionBeginUser;
74   PetscCheck(label, comm, PETSC_ERR_ARG_BADPTR, "Label should be non-NULL");
75 
76   {
77     size_t             num_elements;
78     const PetscScalar *label_values;
79     PetscCallCeed(ceed, CeedOperatorGetContextDoubleRead(op, label, &num_elements, &label_values));
80     PetscCheck(num_elements == 1, comm, PETSC_ERR_SUP, "%s does not support labels with more than 1 value. Label has %zu values", __func__,
81                num_elements);
82     label_value = *label_values;
83     PetscCallCeed(ceed, CeedOperatorRestoreContextDoubleRead(op, label, &label_values));
84   }
85 
86   if (label_value != update_value) {
87     PetscCallCeed(ceed, CeedOperatorSetContextDouble(op, label, &update_value));
88   }
89   PetscFunctionReturn(PETSC_SUCCESS);
90 }
91 
92 // RHS (Explicit time-stepper) function setup
93 //   This is the RHS of the ODE, given as u_t = G(t,u)
94 //   This function takes in a state vector Q and writes into G
95 PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
96   User        user = *(User *)user_data;
97   MPI_Comm    comm = PetscObjectComm((PetscObject)ts);
98   PetscScalar dt;
99   Vec         Q_loc = user->Q_loc;
100   PetscFunctionBeginUser;
101 
102   // Update time dependent data
103   PetscCall(UpdateBoundaryValues(user, Q_loc, t));
104   if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(user->ceed, comm, t, user->op_rhs_ctx->op, user->phys->solution_time_label));
105   PetscCall(TSGetTimeStep(ts, &dt));
106   if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(user->ceed, comm, dt, user->op_rhs_ctx->op, user->phys->timestep_size_label));
107 
108   PetscCall(ApplyCeedOperatorGlobalToGlobal(Q, G, user->op_rhs_ctx));
109 
110   // Inverse of the lumped mass matrix
111   PetscCall(VecPointwiseMult(G, G, user->M_inv));
112 
113   PetscFunctionReturn(PETSC_SUCCESS);
114 }
115 
116 // Surface forces function setup
117 static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
118   DMLabel            face_label;
119   const PetscScalar *g;
120   PetscInt           dof, dim = 3;
121   MPI_Comm           comm;
122   PetscSection       s;
123 
124   PetscFunctionBeginUser;
125   PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
126   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
127   PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
128   PetscCall(VecGetArrayRead(G_loc, &g));
129   for (PetscInt w = 0; w < num_walls; w++) {
130     const PetscInt wall = walls[w];
131     IS             wall_is;
132     PetscCall(DMGetLocalSection(dm, &s));
133     PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
134     if (wall_is) {  // There exist such points on this process
135       PetscInt        num_points;
136       PetscInt        num_comp = 0;
137       const PetscInt *points;
138       PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
139       PetscCall(ISGetSize(wall_is, &num_points));
140       PetscCall(ISGetIndices(wall_is, &points));
141       for (PetscInt i = 0; i < num_points; i++) {
142         const PetscInt           p = points[i];
143         const StateConservative *r;
144         PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
145         PetscCall(PetscSectionGetDof(s, p, &dof));
146         for (PetscInt node = 0; node < dof / num_comp; node++) {
147           for (PetscInt j = 0; j < 3; j++) {
148             reaction_force[w * dim + j] -= r[node].momentum[j];
149           }
150         }
151       }
152       PetscCall(ISRestoreIndices(wall_is, &points));
153     }
154     PetscCall(ISDestroy(&wall_is));
155   }
156   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
157   //  Restore Vectors
158   PetscCall(VecRestoreArrayRead(G_loc, &g));
159 
160   PetscFunctionReturn(PETSC_SUCCESS);
161 }
162 
// Implicit time-stepper function setup
//   Evaluates the implicit residual G(t, Q, Q_dot) used by TS integrators with an IFunction.
//   The local residual is kept in the named vector "ResidualLocal" so other consumers
//   (e.g. the wall-force monitor) can read it after this call.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;
  PetscFunctionBeginUser;

  // Get local vectors
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data (boundary values and operator context labels)
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(user->ceed, comm, t, user->op_ifunction, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(user->ceed, comm, dt, user->op_ifunction, user->phys->timestep_size_label));

  // Global-to-local (both scatters started before either is finished, to overlap communication)
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors; the returned mem types record where each array
  // was accessed so the matching C2P calls below restore to the same memory space
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorApply, Q, G, 0, 0));
  PetscCall(PetscLogGpuTimeBegin());
  PetscCallCeed(user->ceed, CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE));
  PetscCall(PetscLogGpuTimeEnd());
  PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorApply, Q, G, 0, 0));

  // Restore vectors
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Add the data-driven subgrid-stress model contribution, if enabled
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SgsDDModelApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global (ADD_VALUES accumulates shared-dof contributions across ranks)
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscFunctionReturn(PETSC_SUCCESS);
}
217 
218 static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
219   PetscCount ncoo;
220   PetscInt  *rows_petsc, *cols_petsc;
221 
222   PetscFunctionBeginUser;
223   if (pbdiagonal) {
224     CeedSize l_size;
225     PetscCallCeed(user->ceed, CeedOperatorGetActiveVectorLengths(user->op_ijacobian, &l_size, NULL));
226     ncoo       = l_size * 5;
227     rows_petsc = malloc(ncoo * sizeof(rows_petsc));
228     cols_petsc = malloc(ncoo * sizeof(cols_petsc));
229     for (PetscCount n = 0; n < l_size / 5; n++) {
230       for (PetscInt i = 0; i < 5; i++) {
231         for (PetscInt j = 0; j < 5; j++) {
232           rows_petsc[(n * 5 + i) * 5 + j] = n * 5 + i;
233           cols_petsc[(n * 5 + i) * 5 + j] = n * 5 + j;
234         }
235       }
236     }
237   } else {
238     CeedInt *rows_ceed, *cols_ceed;
239     PetscCallCeed(user->ceed, CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
240     PetscCall(IntArrayC2P(ncoo, &rows_ceed, &rows_petsc));
241     PetscCall(IntArrayC2P(ncoo, &cols_ceed, &cols_petsc));
242   }
243   PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows_petsc, cols_petsc));
244   free(rows_petsc);
245   free(cols_petsc);
246   PetscCallCeed(user->ceed, CeedVectorCreate(user->ceed, ncoo, coo_values));
247   PetscFunctionReturn(PETSC_SUCCESS);
248 }
249 
250 static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
251   CeedMemType        mem_type = CEED_MEM_HOST;
252   const PetscScalar *values;
253   MatType            mat_type;
254 
255   PetscFunctionBeginUser;
256   PetscCall(MatGetType(J, &mat_type));
257   if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
258   if (pbdiagonal) {
259     PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
260     PetscCall(PetscLogGpuTimeBegin());
261     PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE));
262     PetscCall(PetscLogGpuTimeEnd());
263     PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
264   } else {
265     PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
266     PetscCall(PetscLogGpuTimeBegin());
267     PetscCallCeed(user->ceed, CeedOperatorLinearAssemble(user->op_ijacobian, coo_values));
268     PetscCall(PetscLogGpuTimeEnd());
269     PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
270   }
271   PetscCallCeed(user->ceed, CeedVectorGetArrayRead(coo_values, mem_type, &values));
272   PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
273   PetscCallCeed(user->ceed, CeedVectorRestoreArrayRead(coo_values, &values));
274   PetscFunctionReturn(PETSC_SUCCESS);
275 }
276 
277 PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
278   User      user = *(User *)user_data;
279   Ceed      ceed = user->ceed;
280   PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
281   PetscFunctionBeginUser;
282   if (user->phys->ijacobian_time_shift_label)
283     PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift));
284   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
285   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
286   PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
287   if (!user->matrices_set_up) {
288     if (J_is_shell) {
289       OperatorApplyContext op_ijacobian_ctx;
290       OperatorApplyContextCreate(user->dm, user->dm, user->ceed, user->op_ijacobian, user->q_ceed, user->g_ceed, user->Q_dot_loc, NULL,
291                                  &op_ijacobian_ctx);
292       PetscCall(MatShellSetContext(J, op_ijacobian_ctx));
293       PetscCall(MatShellSetContextDestroy(J, (PetscErrorCode(*)(void *))OperatorApplyContextDestroy));
294       PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_Ceed));
295       PetscCall(MatShellSetOperation(J, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiag_Ceed));
296       PetscCall(MatSetUp(J));
297     }
298     if (!J_pre_is_shell) {
299       PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
300     }
301     if (J != J_pre && !J_is_shell && !J_is_mffd) {
302       PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
303     }
304     user->matrices_set_up = true;
305   }
306   if (!J_pre_is_shell) {
307     PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
308   }
309   if (user->coo_values_amat) {
310     PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
311   } else if (J_is_mffd) {
312     PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
313     PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
314   }
315   PetscFunctionReturn(PETSC_SUCCESS);
316 }
317 
// Write a checkpoint: optional VTK visualization output plus a binary file for restarts.
//
// @param[in] user    User context
// @param[in] Q       Global solution vector
// @param[in] step_no Current step number (used in file names and stored in the binary file)
// @param[in] time    Current solver time (non-dimensional; dimensionalized before writing)
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;
  PetscFunctionBeginUser;

  if (user->app_ctx->checkpoint_vtk) {
    // Set up output
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    // Optionally also write the solution interpolated onto a refined visualization DM
    if (user->dm_viz) {
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      // NOTE(review): the VTK viewer defers writing until destroy; the vectors are
      // restored before PetscViewerDestroy here — presumably safe because VecView takes
      // a reference, but confirm against the PETSc VTK viewer implementation
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  // File-format token records whether indices were written with 32- or 64-bit PetscInt
  PetscInt32 token = PetscDefined(USE_64BIT_INDICES) ? FLUIDS_FILE_TOKEN_64 : FLUIDS_FILE_TOKEN_32;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT32));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
379 
// CSV Monitor
// Computes reaction forces on the configured wall face sets and writes them to the
// wall-forces viewer, either as CSV rows or as human-readable text.
PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
  User              user = ctx;
  Vec               G_loc;
  PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
  const PetscInt   *walls  = user->app_ctx->wall_forces.walls;
  PetscViewer       viewer = user->app_ctx->wall_forces.viewer;
  PetscViewerFormat format = user->app_ctx->wall_forces.viewer_format;
  PetscScalar      *reaction_force;
  PetscBool         iascii;

  PetscFunctionBeginUser;
  // No viewer configured => wall-force monitoring disabled
  if (!viewer) PetscFunctionReturn(PETSC_SUCCESS);
  // "ResidualLocal" is the named vector shared with IFunction_NS, which holds the most
  // recent local residual evaluation
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
  PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));

  if (iascii) {
    // Emit the CSV header only once per run
    if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
      user->app_ctx->wall_forces.header_written = PETSC_TRUE;
    }
    for (PetscInt w = 0; w < num_wall; w++) {
      PetscInt wall = walls[w];
      if (format == PETSC_VIEWER_ASCII_CSV) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));

      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
      }
    }
  }
  PetscCall(PetscFree(reaction_force));
  PetscFunctionReturn(PETSC_SUCCESS);
}
420 
421 // User provided TS Monitor
422 PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
423   User user = ctx;
424   PetscFunctionBeginUser;
425 
426   // Print every 'checkpoint_interval' steps
427   if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
428       (user->app_ctx->cont_steps == step_no && step_no != 0)) {
429     PetscFunctionReturn(PETSC_SUCCESS);
430   }
431 
432   PetscCall(WriteOutput(user, Q, step_no, time));
433 
434   PetscFunctionReturn(PETSC_SUCCESS);
435 }
436 
// TS: Create, setup, and solve
// Creates the TS (implicit BDF or explicit RK5F), configures time-stepping options and
// monitors, optionally restarts from checkpoint data, runs the solve (with an optional
// preloading pass), and reports solve timing.
//
// @param[in]     dm      DM for the solution
// @param[in]     user    User context
// @param[in]     app_ctx Application settings
// @param[in]     phys    Physics settings (implicit vs explicit, available operators)
// @param[in,out] Q       Solution vector (initial condition in, final solution out)
// @param[out]    f_time  Final solve time
// @param[out]    ts      Created TS (caller owns)
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;
  PetscFunctionBeginUser;

  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      if (app_ctx->amat_type) {
        // Use a separate A-matrix type from the preconditioning matrix
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        // TS holds its own references; drop ours
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs_ctx, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Defaults below may be overridden by TSSetFromOptions; times are scaled by units->second
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  if (app_ctx->test_type != TESTTYPE_NONE) {
    // Test runs take a short, fixed number of steps
    PetscCall(TSSetMaxSteps(*ts, 10));
  }
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;   // require all BCs be updated
  if (app_ctx->cont_steps) {  // continue from previous timestep data
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_TurbulenceStatistics, user, NULL));
    // Seed the statistics collection with the (possibly restarted) start time
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    PetscCallCeed(user->ceed,
                  CeedOperatorSetContextDouble(user->spanstats.op_stats_collect_ctx->op, user->spanstats.previous_time_label, &previous_time));
  }
  if (app_ctx->diff_filter_monitor) PetscCall(TSMonitorSet(*ts, TSMonitor_DifferentialFilter, user, NULL));

  // Solve
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  PetscCall(PetscLogDefaultBegin());  // So we can use PetscLogStageGetPerfInfo without -log_view
  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  // Re-apply start time/step: the preload macro may run this body twice
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // LCOV_EXCL_START
    // Preload pass: take one cheap step on a copy of Q with a loosened SNES tolerance,
    // then restore the original tolerance
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, Q_preload));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    // Synchronize ranks before timing the real solve
    PetscCall(PetscBarrier((PetscObject)*ts));
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    // Final checkpoint (interval -1 means "final only") and solve-time report
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    PetscLogStage      stage_id;
    PetscEventPerfInfo stage_perf;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogStageGetPerfInfo(stage_id, &stage_perf));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_perf.time));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
560