xref: /libCEED/examples/fluids/src/setupts.c (revision f17d818ddb5f0fdb5c67183d33cc31a0016906ee)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Time-stepping functions for Navier-Stokes example using PETSc
10 
11 #include <ceed.h>
12 #include <petscdmplex.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/newtonian_state.h"
17 
// Compute mass matrix for explicit scheme
//
// Builds a CEED mass operator over the solution space and applies it to a local vector of
// ones; the resulting global vector M is the row-sum (diagonally-lumped) mass matrix.
// On return, M holds the RECIPROCAL of the lumped diagonal, ready for pointwise
// multiplication in RHS_NS (see VecReciprocal below).
PetscErrorCode ComputeLumpedMassMatrix(Ceed ceed, DM dm, CeedData ceed_data, Vec M) {
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext op_mass_ctx;
  Vec                  Ones_loc;
  CeedInt              num_comp_q, q_data_size;

  PetscFunctionBeginUser;
  // CEED Restriction: component counts for the solution field and the stored qdata
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q));
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size));

  // CEED QFunction
  PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));

  // CEED Operator: "u" (active input) -> "v" (active output), with precomputed qdata
  PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_COLLOCATED, ceed_data->q_data));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));

  PetscCall(OperatorApplyContextCreate(NULL, dm, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));

  // Applying the mass operator to a vector of ones gives each row's sum, i.e. the lumped diagonal
  PetscCall(DMGetLocalVector(dm, &Ones_loc));
  PetscCall(VecSet(Ones_loc, 1));
  PetscCall(ApplyCeedOperatorLocalToGlobal(Ones_loc, M, op_mass_ctx));

  // Invert diagonally lumped mass vector for RHS function
  PetscCall(VecReciprocal(M));

  // Cleanup
  PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
  PetscCall(DMRestoreLocalVector(dm, &Ones_loc));
  PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
  PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}
56 
57 // Insert Boundary values if it's a new time
58 PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
59   PetscFunctionBeginUser;
60   if (user->time_bc_set != t) {
61     PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
62     user->time_bc_set = t;
63   }
64   PetscFunctionReturn(PETSC_SUCCESS);
65 }
66 
67 // @brief Update the context label value to new value if necessary.
68 // @note This only supports labels with scalar label values (ie. not arrays)
69 PetscErrorCode UpdateContextLabel(Ceed ceed, MPI_Comm comm, PetscScalar update_value, CeedOperator op, CeedContextFieldLabel label) {
70   PetscScalar label_value;
71 
72   PetscFunctionBeginUser;
73   PetscCheck(label, comm, PETSC_ERR_ARG_BADPTR, "Label should be non-NULL");
74 
75   {
76     size_t             num_elements;
77     const PetscScalar *label_values;
78     PetscCallCeed(ceed, CeedOperatorGetContextDoubleRead(op, label, &num_elements, &label_values));
79     PetscCheck(num_elements == 1, comm, PETSC_ERR_SUP, "%s does not support labels with more than 1 value. Label has %zu values", __func__,
80                num_elements);
81     label_value = *label_values;
82     PetscCallCeed(ceed, CeedOperatorRestoreContextDoubleRead(op, label, &label_values));
83   }
84 
85   if (label_value != update_value) {
86     PetscCallCeed(ceed, CeedOperatorSetContextDouble(op, label, &update_value));
87   }
88   PetscFunctionReturn(PETSC_SUCCESS);
89 }
90 
// RHS (Explicit time-stepper) function setup
//   This is the RHS of the ODE, given as u_t = G(t,u)
//   This function takes in a state vector Q and writes into G
PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
  User        user = *(User *)user_data;
  MPI_Comm    comm = PetscObjectComm((PetscObject)ts);
  PetscScalar dt;
  Vec         Q_loc = user->Q_loc;

  PetscFunctionBeginUser;
  // Update time dependent data: boundary values, plus t and dt context fields of the RHS operator
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(user->ceed, comm, t, user->op_rhs_ctx->op, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(user->ceed, comm, dt, user->op_rhs_ctx->op, user->phys->timestep_size_label));

  // Apply the CEED RHS operator: G <- f(t, Q)
  PetscCall(ApplyCeedOperatorGlobalToGlobal(Q, G, user->op_rhs_ctx));

  // Inverse of the lumped mass matrix
  // (user->M_inv is expected to hold the reciprocal lumped mass diagonal; see ComputeLumpedMassMatrix)
  PetscCall(VecPointwiseMult(G, G, user->M_inv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
113 
114 // Surface forces function setup
115 static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
116   DMLabel            face_label;
117   const PetscScalar *g;
118   PetscInt           dof, dim = 3;
119   MPI_Comm           comm;
120   PetscSection       s;
121 
122   PetscFunctionBeginUser;
123   PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
124   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
125   PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
126   PetscCall(VecGetArrayRead(G_loc, &g));
127   for (PetscInt w = 0; w < num_walls; w++) {
128     const PetscInt wall = walls[w];
129     IS             wall_is;
130     PetscCall(DMGetLocalSection(dm, &s));
131     PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
132     if (wall_is) {  // There exist such points on this process
133       PetscInt        num_points;
134       PetscInt        num_comp = 0;
135       const PetscInt *points;
136       PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
137       PetscCall(ISGetSize(wall_is, &num_points));
138       PetscCall(ISGetIndices(wall_is, &points));
139       for (PetscInt i = 0; i < num_points; i++) {
140         const PetscInt           p = points[i];
141         const StateConservative *r;
142         PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
143         PetscCall(PetscSectionGetDof(s, p, &dof));
144         for (PetscInt node = 0; node < dof / num_comp; node++) {
145           for (PetscInt j = 0; j < 3; j++) {
146             reaction_force[w * dim + j] -= r[node].momentum[j];
147           }
148         }
149       }
150       PetscCall(ISRestoreIndices(wall_is, &points));
151     }
152     PetscCall(ISDestroy(&wall_is));
153   }
154   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
155   //  Restore Vectors
156   PetscCall(VecRestoreArrayRead(G_loc, &g));
157   PetscFunctionReturn(PETSC_SUCCESS);
158 }
159 
// Implicit time-stepper function setup
//
// Computes the DAE residual G(t, Q, Q_dot) for TS implicit integrators.
// The local residual is cached under the named vector "ResidualLocal" so that
// TSMonitor_WallForce can reuse it for surface-force reporting.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;

  PetscFunctionBeginUser;
  // Get local vectors
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data: boundary values plus t/dt context fields of the CEED operator
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(user->ceed, comm, t, user->op_ifunction, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(user->ceed, comm, dt, user->op_ifunction, user->phys->timestep_size_label));

  // Global-to-local
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors (read-only for inputs, writable for the residual);
  // the returned mem types must be passed back to the matching C2P calls below
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorApply, Q, G, 0, 0));
  PetscCall(PetscLogGpuTimeBegin());
  PetscCallCeed(user->ceed, CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE));
  PetscCall(PetscLogGpuTimeEnd());
  PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorApply, Q, G, 0, 0));

  // Restore vectors
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Optional data-driven subgrid-stress model contribution to the residual
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SgsDDModelApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global (ADD_VALUES accumulates shared-DoF contributions; G must be zeroed first)
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
213 
214 static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
215   PetscCount ncoo;
216   PetscInt  *rows_petsc, *cols_petsc;
217 
218   PetscFunctionBeginUser;
219   if (pbdiagonal) {
220     CeedSize l_size;
221     PetscCallCeed(user->ceed, CeedOperatorGetActiveVectorLengths(user->op_ijacobian, &l_size, NULL));
222     ncoo       = l_size * 5;
223     rows_petsc = malloc(ncoo * sizeof(rows_petsc));
224     cols_petsc = malloc(ncoo * sizeof(cols_petsc));
225     for (PetscCount n = 0; n < l_size / 5; n++) {
226       for (PetscInt i = 0; i < 5; i++) {
227         for (PetscInt j = 0; j < 5; j++) {
228           rows_petsc[(n * 5 + i) * 5 + j] = n * 5 + i;
229           cols_petsc[(n * 5 + i) * 5 + j] = n * 5 + j;
230         }
231       }
232     }
233   } else {
234     CeedInt *rows_ceed, *cols_ceed;
235     PetscCallCeed(user->ceed, CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
236     PetscCall(IntArrayC2P(ncoo, &rows_ceed, &rows_petsc));
237     PetscCall(IntArrayC2P(ncoo, &cols_ceed, &cols_petsc));
238   }
239   PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows_petsc, cols_petsc));
240   free(rows_petsc);
241   free(cols_petsc);
242   PetscCallCeed(user->ceed, CeedVectorCreate(user->ceed, ncoo, coo_values));
243   PetscFunctionReturn(PETSC_SUCCESS);
244 }
245 
246 static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
247   CeedMemType        mem_type = CEED_MEM_HOST;
248   const PetscScalar *values;
249   MatType            mat_type;
250 
251   PetscFunctionBeginUser;
252   PetscCall(MatGetType(J, &mat_type));
253   if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
254   if (pbdiagonal) {
255     PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
256     PetscCall(PetscLogGpuTimeBegin());
257     PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE));
258     PetscCall(PetscLogGpuTimeEnd());
259     PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
260   } else {
261     PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
262     PetscCall(PetscLogGpuTimeBegin());
263     PetscCallCeed(user->ceed, CeedOperatorLinearAssemble(user->op_ijacobian, coo_values));
264     PetscCall(PetscLogGpuTimeEnd());
265     PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
266   }
267   PetscCallCeed(user->ceed, CeedVectorGetArrayRead(coo_values, mem_type, &values));
268   PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
269   PetscCallCeed(user->ceed, CeedVectorRestoreArrayRead(coo_values, &values));
270   PetscFunctionReturn(PETSC_SUCCESS);
271 }
272 
273 PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
274   User      user = *(User *)user_data;
275   Ceed      ceed = user->ceed;
276   PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
277 
278   PetscFunctionBeginUser;
279   if (user->phys->ijacobian_time_shift_label)
280     PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift));
281   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
282   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
283   PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
284   if (!user->matrices_set_up) {
285     if (J_is_shell) {
286       OperatorApplyContext op_ijacobian_ctx;
287       OperatorApplyContextCreate(user->dm, user->dm, user->ceed, user->op_ijacobian, user->q_ceed, user->g_ceed, user->Q_dot_loc, NULL,
288                                  &op_ijacobian_ctx);
289       PetscCall(MatShellSetContext(J, op_ijacobian_ctx));
290       PetscCall(MatShellSetContextDestroy(J, (PetscErrorCode(*)(void *))OperatorApplyContextDestroy));
291       PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_Ceed));
292       PetscCall(MatShellSetOperation(J, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiag_Ceed));
293       PetscCall(MatSetUp(J));
294     }
295     if (!J_pre_is_shell) {
296       PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
297     }
298     if (J != J_pre && !J_is_shell && !J_is_mffd) {
299       PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
300     }
301     user->matrices_set_up = true;
302   }
303   if (!J_pre_is_shell) {
304     PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
305   }
306   if (user->coo_values_amat) {
307     PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
308   } else if (J_is_mffd) {
309     PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
310     PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
311   }
312   PetscFunctionReturn(PETSC_SUCCESS);
313 }
314 
// Write checkpoint output: optional VTK (.vtu) files of the state (and a refined visualization
// DM if configured), plus a binary solution file usable to continue a simulation.
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;

  PetscFunctionBeginUser;
  if (user->app_ctx->checkpoint_vtk) {
    // Set up output
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    // Optionally interpolate onto a refined visualization DM and write that too
    if (user->dm_viz) {
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  // File token records the index width so readers can detect 32/64-bit mismatch
  PetscInt32 token = PetscDefined(USE_64BIT_INDICES) ? FLUIDS_FILE_TOKEN_64 : FLUIDS_FILE_TOKEN_32;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT32));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  // NOTE(review): `time` is PetscScalar but written as PETSC_REAL — these differ in complex
  // builds; confirm this example is real-scalar only
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
376 
// CSV Monitor
//
// TS monitor that reports wall reaction forces, computed from the cached "ResidualLocal"
// vector (filled by IFunction_NS). Output goes to the configured viewer, either as CSV
// (with a one-time header) or as human-readable text.
PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
  User              user = ctx;
  Vec               G_loc;
  PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
  const PetscInt   *walls  = user->app_ctx->wall_forces.walls;
  PetscViewer       viewer = user->app_ctx->wall_forces.viewer;
  PetscViewerFormat format = user->app_ctx->wall_forces.viewer_format;
  PetscScalar      *reaction_force;
  PetscBool         iascii;

  PetscFunctionBeginUser;
  // No viewer configured: wall-force reporting is disabled
  if (!viewer) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
  PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));

  if (iascii) {
    // Write the CSV header exactly once per run
    if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
      user->app_ctx->wall_forces.header_written = PETSC_TRUE;
    }
    for (PetscInt w = 0; w < num_wall; w++) {
      PetscInt wall = walls[w];
      if (format == PETSC_VIEWER_ASCII_CSV) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));

      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
      }
    }
  }
  PetscCall(PetscFree(reaction_force));
  PetscFunctionReturn(PETSC_SUCCESS);
}
417 
418 // User provided TS Monitor
419 PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
420   User user = ctx;
421 
422   PetscFunctionBeginUser;
423   // Print every 'checkpoint_interval' steps
424   if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
425       (user->app_ctx->cont_steps == step_no && step_no != 0)) {
426     PetscFunctionReturn(PETSC_SUCCESS);
427   }
428 
429   PetscCall(WriteOutput(user, Q, step_no, time));
430   PetscFunctionReturn(PETSC_SUCCESS);
431 }
432 
// TS: Create, setup, and solve
//
// Creates the TS (BDF for implicit physics, RK5F otherwise), wires residual/Jacobian
// callbacks and monitors, optionally restores step/time from a checkpoint, optionally
// preloads one step to warm caches before timing, then runs TSSolve and reports the
// solve-stage wall time. On return, *Q holds the final state and *f_time the final time.
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;

  PetscFunctionBeginUser;
  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      // When a separate operator matrix type is requested, create distinct Amat/Pmat;
      // TS keeps references, so the local handles are destroyed immediately
      if (app_ctx->amat_type) {
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs_ctx, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Default time settings (in nondimensional units via user->units->second);
  // all of these may be overridden by TSSetFromOptions below
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  if (app_ctx->test_type != TESTTYPE_NONE) {
    PetscCall(TSSetMaxSteps(*ts, 10));
  }
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;   // require all BCs be updated
  if (app_ctx->cont_steps) {  // continue from previous timestep data
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  // Register monitors (checkpointing, wall forces, turbulence statistics, differential filter)
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_TurbulenceStatistics, user, NULL));
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    PetscCallCeed(user->ceed,
                  CeedOperatorSetContextDouble(user->spanstats.op_stats_collect_ctx->op, user->spanstats.previous_time_label, &previous_time));
  }
  if (app_ctx->diff_filter_monitor) PetscCall(TSMonitorSet(*ts, TSMonitor_DifferentialFilter, user, NULL));

  // Solve
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  PetscCall(PetscLogDefaultBegin());  // So we can use PetscLogStageGetPerfInfo without -log_view
  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // Warm-up pass: step a throwaway copy of Q with a loose SNES tolerance so timing of the
    // real solve excludes one-time setup costs; tolerance is restored afterwards
    // LCOV_EXCL_START
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, Q_preload));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    PetscCall(PetscBarrier((PetscObject)*ts));
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    // checkpoint_interval == -1 means "final checkpoint only"
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    PetscLogStage      stage_id;
    PetscEventPerfInfo stage_perf;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogStageGetPerfInfo(stage_id, &stage_perf));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_perf.time));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
556