xref: /libCEED/examples/fluids/src/setupts.c (revision dd64fc8452c2d35c954858232143719e6bb2e61d)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Time-stepping functions for Navier-Stokes example using PETSc
10 
11 #include <ceed.h>
12 #include <petscdmplex.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/newtonian_state.h"
17 
// Compute mass matrix for explicit scheme
//
// Assembles the diagonally-lumped mass matrix by applying the CEED mass operator to a
// vector of all ones (row sums), then stores its elementwise reciprocal in the global
// vector M so the explicit RHS can scale by M^{-1} with a pointwise multiply instead of
// a solve (see RHS_NS).
PetscErrorCode ComputeLumpedMassMatrix(Ceed ceed, DM dm, CeedData ceed_data, Vec M) {
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext op_mass_ctx;
  Vec                  Ones_loc;
  CeedInt              num_comp_q, q_data_size;

  PetscFunctionBeginUser;
  // Component counts come from the existing solution and quadrature-data restrictions
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q));
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size));

  // Build a mass operator with active input "u" and active output "v" on the solution space
  PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));
  PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_NONE, ceed_data->q_data));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE));

  // Input DM is NULL: the context only needs dm for the local-to-global of the result
  PetscCall(OperatorApplyContextCreate(NULL, dm, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));

  // Lump the mass matrix: M = op_mass * ones
  PetscCall(DMGetLocalVector(dm, &Ones_loc));
  PetscCall(VecSet(Ones_loc, 1));
  PetscCall(ApplyCeedOperatorLocalToGlobal(Ones_loc, M, op_mass_ctx));

  // Invert diagonally lumped mass vector for RHS function
  PetscCall(VecReciprocal(M));

  // Cleanup
  PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
  PetscCall(DMRestoreLocalVector(dm, &Ones_loc));
  PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
  PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}
52 
53 // Insert Boundary values if it's a new time
54 PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
55   PetscFunctionBeginUser;
56   if (user->time_bc_set != t) {
57     PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
58     user->time_bc_set = t;
59   }
60   PetscFunctionReturn(PETSC_SUCCESS);
61 }
62 
63 // @brief Update the context label value to new value if necessary.
64 // @note This only supports labels with scalar label values (ie. not arrays)
65 PetscErrorCode UpdateContextLabel(Ceed ceed, MPI_Comm comm, PetscScalar update_value, CeedOperator op, CeedContextFieldLabel label) {
66   PetscScalar label_value;
67 
68   PetscFunctionBeginUser;
69   PetscCheck(label, comm, PETSC_ERR_ARG_BADPTR, "Label should be non-NULL");
70 
71   {
72     size_t             num_elements;
73     const PetscScalar *label_values;
74     PetscCallCeed(ceed, CeedOperatorGetContextDoubleRead(op, label, &num_elements, &label_values));
75     PetscCheck(num_elements == 1, comm, PETSC_ERR_SUP, "%s does not support labels with more than 1 value. Label has %zu values", __func__,
76                num_elements);
77     label_value = *label_values;
78     PetscCallCeed(ceed, CeedOperatorRestoreContextDoubleRead(op, label, &label_values));
79   }
80 
81   if (label_value != update_value) {
82     PetscCallCeed(ceed, CeedOperatorSetContextDouble(op, label, &update_value));
83   }
84   PetscFunctionReturn(PETSC_SUCCESS);
85 }
86 
// RHS (Explicit time-stepper) function setup
//   This is the RHS of the ODE, given as u_t = G(t,u)
//   This function takes in a state vector Q and writes into G
PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
  User        user = *(User *)user_data;
  MPI_Comm    comm = PetscObjectComm((PetscObject)ts);
  PetscScalar dt;
  Vec         Q_loc = user->Q_loc;

  PetscFunctionBeginUser;
  // Update time dependent data: boundary values and (if the problem defines the labels)
  // the solution time and timestep size in the CEED operator context
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(user->ceed, comm, t, user->op_rhs_ctx->op, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(user->ceed, comm, dt, user->op_rhs_ctx->op, user->phys->timestep_size_label));

  // Apply the CEED RHS operator: G = f(t, Q)
  PetscCall(ApplyCeedOperatorGlobalToGlobal(Q, G, user->op_rhs_ctx));

  // Inverse of the lumped mass matrix
  // G := M^{-1} * G via the reciprocal lumped-mass vector (see ComputeLumpedMassMatrix)
  PetscCall(VecPointwiseMult(G, G, user->M_inv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
109 
110 // Surface forces function setup
111 static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
112   DMLabel            face_label;
113   const PetscScalar *g;
114   PetscInt           dof, dim = 3;
115   MPI_Comm           comm;
116   PetscSection       s;
117 
118   PetscFunctionBeginUser;
119   PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
120   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
121   PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
122   PetscCall(VecGetArrayRead(G_loc, &g));
123   for (PetscInt w = 0; w < num_walls; w++) {
124     const PetscInt wall = walls[w];
125     IS             wall_is;
126     PetscCall(DMGetLocalSection(dm, &s));
127     PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
128     if (wall_is) {  // There exist such points on this process
129       PetscInt        num_points;
130       PetscInt        num_comp = 0;
131       const PetscInt *points;
132       PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
133       PetscCall(ISGetSize(wall_is, &num_points));
134       PetscCall(ISGetIndices(wall_is, &points));
135       for (PetscInt i = 0; i < num_points; i++) {
136         const PetscInt           p = points[i];
137         const StateConservative *r;
138         PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
139         PetscCall(PetscSectionGetDof(s, p, &dof));
140         for (PetscInt node = 0; node < dof / num_comp; node++) {
141           for (PetscInt j = 0; j < 3; j++) {
142             reaction_force[w * dim + j] -= r[node].momentum[j];
143           }
144         }
145       }
146       PetscCall(ISRestoreIndices(wall_is, &points));
147     }
148     PetscCall(ISDestroy(&wall_is));
149   }
150   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
151   //  Restore Vectors
152   PetscCall(VecRestoreArrayRead(G_loc, &g));
153   PetscFunctionReturn(PETSC_SUCCESS);
154 }
155 
// Implicit time-stepper function setup
//
// Evaluates the implicit residual G = F(t, Q, Q_dot) by placing the local PETSc vectors
// into CEED vectors (zero-copy where memory types allow), applying the CEED IFunction
// operator, optionally adding the data-driven SGS model contribution, and assembling the
// global residual with a local-to-global ADD_VALUES scatter.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;

  PetscFunctionBeginUser;
  // Get local vectors
  // Named local vector so TSMonitor_WallForce can reuse the most recent residual
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(user->ceed, comm, t, user->op_ifunction, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(user->ceed, comm, dt, user->op_ifunction, user->phys->timestep_size_label));

  // Global-to-local
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors
  // Read-only for inputs, writable for the output; mem types are recorded for restore
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorApply, Q, G, 0, 0));
  PetscCall(PetscLogGpuTimeBegin());
  PetscCallCeed(user->ceed, CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE));
  PetscCall(PetscLogGpuTimeEnd());
  PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorApply, Q, G, 0, 0));

  // Restore vectors
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Data-driven subgrid-stress model adds its contribution to the local residual
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SgsDDModelApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
209 
210 static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
211   PetscCount ncoo;
212   PetscInt  *rows_petsc, *cols_petsc;
213 
214   PetscFunctionBeginUser;
215   if (pbdiagonal) {
216     CeedSize l_size;
217     PetscCallCeed(user->ceed, CeedOperatorGetActiveVectorLengths(user->op_ijacobian, &l_size, NULL));
218     ncoo       = l_size * 5;
219     rows_petsc = malloc(ncoo * sizeof(rows_petsc));
220     cols_petsc = malloc(ncoo * sizeof(cols_petsc));
221     for (PetscCount n = 0; n < l_size / 5; n++) {
222       for (PetscInt i = 0; i < 5; i++) {
223         for (PetscInt j = 0; j < 5; j++) {
224           rows_petsc[(n * 5 + i) * 5 + j] = n * 5 + i;
225           cols_petsc[(n * 5 + i) * 5 + j] = n * 5 + j;
226         }
227       }
228     }
229   } else {
230     CeedInt *rows_ceed, *cols_ceed;
231     PetscCallCeed(user->ceed, CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows_ceed, &cols_ceed));
232     PetscCall(IntArrayC2P(ncoo, &rows_ceed, &rows_petsc));
233     PetscCall(IntArrayC2P(ncoo, &cols_ceed, &cols_petsc));
234   }
235   PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows_petsc, cols_petsc));
236   free(rows_petsc);
237   free(cols_petsc);
238   PetscCallCeed(user->ceed, CeedVectorCreate(user->ceed, ncoo, coo_values));
239   PetscFunctionReturn(PETSC_SUCCESS);
240 }
241 
// @brief Assemble the CEED Jacobian values into matrix J using its preallocated COO
//        structure (see FormPreallocation).
static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
  CeedMemType        mem_type = CEED_MEM_HOST;
  const PetscScalar *values;
  MatType            mat_type;

  PetscFunctionBeginUser;
  PetscCall(MatGetType(J, &mat_type));
  // GPU matrix types can consume device pointers directly, avoiding a host copy
  if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
  if (pbdiagonal) {
    // Assemble only the 5x5 point-block diagonal
    PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
    PetscCall(PetscLogGpuTimeBegin());
    PetscCallCeed(user->ceed, CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE));
    PetscCall(PetscLogGpuTimeEnd());
    PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemblePointBlockDiagonal, J, 0, 0, 0));
  } else {
    // Full sparse assembly
    PetscCall(PetscLogEventBegin(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
    PetscCall(PetscLogGpuTimeBegin());
    PetscCallCeed(user->ceed, CeedOperatorLinearAssemble(user->op_ijacobian, coo_values));
    PetscCall(PetscLogGpuTimeEnd());
    PetscCall(PetscLogEventEnd(FLUIDS_CeedOperatorAssemble, J, 0, 0, 0));
  }
  PetscCallCeed(user->ceed, CeedVectorGetArrayRead(coo_values, mem_type, &values));
  PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
  PetscCallCeed(user->ceed, CeedVectorRestoreArrayRead(coo_values, &values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
268 
269 PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
270   User      user = *(User *)user_data;
271   Ceed      ceed = user->ceed;
272   PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
273 
274   PetscFunctionBeginUser;
275   if (user->phys->ijacobian_time_shift_label)
276     PetscCallCeed(ceed, CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift));
277   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
278   PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
279   PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
280   if (!user->matrices_set_up) {
281     if (J_is_shell) {
282       OperatorApplyContext op_ijacobian_ctx;
283       OperatorApplyContextCreate(user->dm, user->dm, user->ceed, user->op_ijacobian, user->q_ceed, user->g_ceed, user->Q_dot_loc, NULL,
284                                  &op_ijacobian_ctx);
285       PetscCall(MatShellSetContext(J, op_ijacobian_ctx));
286       PetscCall(MatShellSetContextDestroy(J, (PetscErrorCode(*)(void *))OperatorApplyContextDestroy));
287       PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_Ceed));
288       PetscCall(MatShellSetOperation(J, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiag_Ceed));
289       PetscCall(MatSetUp(J));
290     }
291     if (!J_pre_is_shell) {
292       PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
293     }
294     if (J != J_pre && !J_is_shell && !J_is_mffd) {
295       PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
296     }
297     user->matrices_set_up = true;
298   }
299   if (!J_pre_is_shell) {
300     PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
301   }
302   if (user->coo_values_amat) {
303     PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
304   } else if (J_is_mffd) {
305     PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
306     PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
307   }
308   PetscFunctionReturn(PETSC_SUCCESS);
309 }
310 
// @brief Write visualization (VTU) output and a binary checkpoint for the state Q.
//
// VTK output (optional, gated on checkpoint_vtk) writes the local state vector and,
// if a refined visualization DM is configured, an interpolated refined state. The
// binary checkpoint always follows: file token, step number, dimensionalized time,
// then the state vector itself (read back by TSSolve_NS continuation).
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;

  PetscFunctionBeginUser;
  if (user->app_ctx->checkpoint_vtk) {
    // Set up output
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    if (user->dm_viz) {
      // Also write the state interpolated onto the refined visualization mesh
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      // NOTE(review): the VTK viewer defers its file write until destroy; Q_refined_loc
      // is restored to the DM before viewer_refined is destroyed. The viewer holds a
      // reference so this appears safe, but confirm against the PETSc VTK viewer docs.
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  // File token records the index width so the reader can detect a 32/64-bit mismatch
  PetscInt32 token = PetscDefined(USE_64BIT_INDICES) ? FLUIDS_FILE_TOKEN_64 : FLUIDS_FILE_TOKEN_32;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT32));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
372 
// CSV Monitor
// TS monitor reporting per-wall reaction forces computed from the most recent residual
// (the named local vector "ResidualLocal" filled by IFunction_NS). Output goes to the
// configured viewer, either as CSV rows (with a one-time header) or human-readable text.
PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
  User              user = ctx;
  Vec               G_loc;
  PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
  const PetscInt   *walls  = user->app_ctx->wall_forces.walls;
  PetscViewer       viewer = user->app_ctx->wall_forces.viewer;
  PetscViewerFormat format = user->app_ctx->wall_forces.viewer_format;
  PetscScalar      *reaction_force;
  PetscBool         iascii;

  PetscFunctionBeginUser;
  // No viewer configured means wall-force monitoring is disabled
  if (!viewer) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
  PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
  PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));

  if (iascii) {
    // Write the CSV header exactly once per run
    if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
      user->app_ctx->wall_forces.header_written = PETSC_TRUE;
    }
    for (PetscInt w = 0; w < num_wall; w++) {
      PetscInt wall = walls[w];
      if (format == PETSC_VIEWER_ASCII_CSV) {
        PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));

      } else {
        PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
                                         reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
      }
    }
  }
  PetscCall(PetscFree(reaction_force));
  PetscFunctionReturn(PETSC_SUCCESS);
}
413 
414 // User provided TS Monitor
415 PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
416   User user = ctx;
417 
418   PetscFunctionBeginUser;
419   // Print every 'checkpoint_interval' steps
420   if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
421       (user->app_ctx->cont_steps == step_no && step_no != 0)) {
422     PetscFunctionReturn(PETSC_SUCCESS);
423   }
424 
425   PetscCall(WriteOutput(user, Q, step_no, time));
426   PetscFunctionReturn(PETSC_SUCCESS);
427 }
428 
// TS: Create, setup, and solve
//
// Builds the TS object (BDF for implicit problems with optional assembled/shell
// Jacobians, RK5F otherwise), configures defaults and step limits, handles restart
// from a binary checkpoint, registers monitors, optionally preloads one step to
// warm up kernels, then runs TSSolve. On exit *Q holds the final state and *f_time
// the (non-dimensional) solve end time.
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;

  PetscFunctionBeginUser;
  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      if (app_ctx->amat_type) {
        // Distinct Amat type requested: create Pmat with the DM default type first,
        // then switch the DM mat type and create Amat; TS takes references, so the
        // local handles are destroyed immediately after
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs_ctx, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Defaults below may all be overridden by TSSetFromOptions
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  if (app_ctx->test_type != TESTTYPE_NONE) {
    // Test runs are capped to a small, fixed number of steps
    PetscCall(TSSetMaxSteps(*ts, 10));
  }
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;   // require all BCs be updated
  if (app_ctx->cont_steps) {  // continue from previous timestep data
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_TurbulenceStatistics, user, NULL));
    // Seed the statistics-collection operator with the restart time
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    PetscCallCeed(user->ceed,
                  CeedOperatorSetContextDouble(user->spanstats.op_stats_collect_ctx->op, user->spanstats.previous_time_label, &previous_time));
  }
  if (app_ctx->diff_filter_monitor) PetscCall(TSMonitorSet(*ts, TSMonitor_DifferentialFilter, user, NULL));

  // Solve
  // Save time/step so the preload pass can be rewound before the real solve
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  PetscCall(PetscLogDefaultBegin());  // So we can use PetscLogStageGetPerfInfo without -log_view
  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // LCOV_EXCL_START
    // Preload pass: take one cheap step on a copy of Q with a loose SNES tolerance to
    // warm up kernels, then restore the tolerance; the copy is discarded
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, Q_preload));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    PetscCall(PetscBarrier((PetscObject)*ts));
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    // checkpoint_interval == -1 means "final state only"
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    PetscLogStage      stage_id;
    PetscEventPerfInfo stage_perf;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogStageGetPerfInfo(stage_id, &stage_perf));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_perf.time));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
552