xref: /libCEED/examples/fluids/src/setupts.c (revision 4210c567ca9f6fc1a2313b270ba3edc9b62e5ca4)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Time-stepping functions for Navier-Stokes example using PETSc
10 
11 #include <ceed.h>
12 #include <petscdmplex.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/newtonian_state.h"
17 
// Compute the lumped mass matrix (stored as a Vec) for the explicit scheme.
//
// The mass operator is applied to a vector of all ones, which yields the row
// sums of the consistent mass matrix (diagonal lumping). The result is then
// inverted entrywise, so M holds 1/m_i and RHS_NS can use a pointwise multiply
// instead of a linear solve. libCEED calls follow the file convention of not
// being error-checked.
PetscErrorCode ComputeLumpedMassMatrix(Ceed ceed, DM dm, CeedData ceed_data, Vec M) {
  Vec           M_loc;
  CeedQFunction qf_mass;
  CeedOperator  op_mass;
  CeedVector    m_ceed, ones_vec;
  CeedInt       num_comp_q, q_data_size;
  PetscFunctionBeginUser;

  // CEED Restriction: size work vectors from the solution restriction
  CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_q, &num_comp_q);
  CeedElemRestrictionGetNumComponents(ceed_data->elem_restr_qd_i, &q_data_size);
  CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &m_ceed, NULL);
  CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &ones_vec, NULL);
  CeedVectorSetValue(ones_vec, 1.0);

  // CEED QFunction for the mass operator
  PetscCall(CreateMassQFunction(ceed, num_comp_q, q_data_size, &qf_mass));

  // CEED Operator: v = M u, using precomputed quadrature data
  CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass);
  CeedOperatorSetField(op_mass, "u", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE);
  CeedOperatorSetField(op_mass, "qdata", ceed_data->elem_restr_qd_i, CEED_BASIS_COLLOCATED, ceed_data->q_data);
  CeedOperatorSetField(op_mass, "v", ceed_data->elem_restr_q, ceed_data->basis_q, CEED_VECTOR_ACTIVE);

  // Place PETSc vector in CEED vector (zero-copy array handoff)
  PetscMemType m_mem_type;
  PetscCall(DMGetLocalVector(dm, &M_loc));
  PetscCall(VecP2C(M_loc, &m_mem_type, m_ceed));

  // Apply CEED Operator: m = M * ones, i.e. the row sums of the mass matrix
  CeedOperatorApply(op_mass, ones_vec, m_ceed, CEED_REQUEST_IMMEDIATE);

  // Restore vectors
  PetscCall(VecC2P(m_ceed, m_mem_type, M_loc));

  // Local-to-Global: accumulate contributions at shared nodes
  PetscCall(VecZeroEntries(M));
  PetscCall(DMLocalToGlobal(dm, M_loc, ADD_VALUES, M));
  PetscCall(DMRestoreLocalVector(dm, &M_loc));

  // Invert diagonally lumped mass vector for RHS function
  PetscCall(VecReciprocal(M));

  // Cleanup
  CeedVectorDestroy(&ones_vec);
  CeedVectorDestroy(&m_ceed);
  CeedQFunctionDestroy(&qf_mass);
  CeedOperatorDestroy(&op_mass);

  PetscFunctionReturn(0);
}
70 
71 // Insert Boundary values if it's a new time
72 PetscErrorCode UpdateBoundaryValues(User user, Vec Q_loc, PetscReal t) {
73   PetscFunctionBeginUser;
74   if (user->time_bc_set != t) {
75     PetscCall(DMPlexInsertBoundaryValues(user->dm, PETSC_TRUE, Q_loc, t, NULL, NULL, NULL));
76     user->time_bc_set = t;
77   }
78   PetscFunctionReturn(0);
79 }
80 
81 // @brief Update the context label value to new value if necessary.
82 // @note This only supports labels with scalar label values (ie. not arrays)
83 PetscErrorCode UpdateContextLabel(MPI_Comm comm, PetscScalar update_value, CeedOperator op, CeedContextFieldLabel label) {
84   PetscScalar label_value;
85 
86   PetscFunctionBeginUser;
87   PetscCheck(label, comm, PETSC_ERR_ARG_BADPTR, "Label should be non-NULL");
88 
89   {
90     size_t             num_elements;
91     const PetscScalar *label_values;
92     CeedOperatorGetContextDoubleRead(op, label, &num_elements, &label_values);
93     PetscCheck(num_elements == 1, comm, PETSC_ERR_SUP, "%s does not support labels with more than 1 value. Label has %zu values", __func__,
94                num_elements);
95     label_value = *label_values;
96     CeedOperatorRestoreContextDoubleRead(op, label, &label_values);
97   }
98 
99   if (label_value != update_value) {
100     CeedOperatorSetContextDouble(op, label, &update_value);
101   }
102   PetscFunctionReturn(0);
103 }
104 
// RHS (Explicit time-stepper) function setup
//   This is the RHS of the ODE, given as u_t = G(t,u)
//   This function takes in a state vector Q and writes into G.
//   The final result is scaled by user->M, which holds the entrywise inverse
//   of the lumped mass matrix (see ComputeLumpedMassMatrix).
PetscErrorCode RHS_NS(TS ts, PetscReal t, Vec Q, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, G_loc;
  PetscMemType q_mem_type, g_mem_type;
  PetscFunctionBeginUser;

  // Get local vector
  PetscCall(DMGetLocalVector(user->dm, &G_loc));

  // Update time dependent data: boundary values and any time/dt labels the
  // RHS operator's QFunction context exposes
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(comm, t, user->op_rhs, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(comm, dt, user->op_rhs, user->phys->timestep_size_label));

  // Global-to-local
  PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

  // Place PETSc vectors in CEED vectors (zero-copy array handoff)
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  CeedOperatorApply(user->op_rhs, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE);

  // Restore vectors (must happen before PETSc touches the arrays again)
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Local-to-Global
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Inverse of the lumped mass matrix (M is Minv)
  PetscCall(VecPointwiseMult(G, G, user->M));

  // Restore vectors
  PetscCall(DMRestoreLocalVector(user->dm, &G_loc));

  PetscFunctionReturn(0);
}
151 
152 // Surface forces function setup
153 static PetscErrorCode Surface_Forces_NS(DM dm, Vec G_loc, PetscInt num_walls, const PetscInt walls[], PetscScalar *reaction_force) {
154   DMLabel            face_label;
155   const PetscScalar *g;
156   PetscInt           dof, dim = 3;
157   MPI_Comm           comm;
158   PetscSection       s;
159 
160   PetscFunctionBeginUser;
161   PetscCall(PetscArrayzero(reaction_force, num_walls * dim));
162   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
163   PetscCall(DMGetLabel(dm, "Face Sets", &face_label));
164   PetscCall(VecGetArrayRead(G_loc, &g));
165   for (PetscInt w = 0; w < num_walls; w++) {
166     const PetscInt wall = walls[w];
167     IS             wall_is;
168     PetscCall(DMGetLocalSection(dm, &s));
169     PetscCall(DMLabelGetStratumIS(face_label, wall, &wall_is));
170     if (wall_is) {  // There exist such points on this process
171       PetscInt        num_points;
172       PetscInt        num_comp = 0;
173       const PetscInt *points;
174       PetscCall(PetscSectionGetFieldComponents(s, 0, &num_comp));
175       PetscCall(ISGetSize(wall_is, &num_points));
176       PetscCall(ISGetIndices(wall_is, &points));
177       for (PetscInt i = 0; i < num_points; i++) {
178         const PetscInt           p = points[i];
179         const StateConservative *r;
180         PetscCall(DMPlexPointLocalRead(dm, p, g, &r));
181         PetscCall(PetscSectionGetDof(s, p, &dof));
182         for (PetscInt node = 0; node < dof / num_comp; node++) {
183           for (PetscInt j = 0; j < 3; j++) {
184             reaction_force[w * dim + j] -= r[node].momentum[j];
185           }
186         }
187       }
188       PetscCall(ISRestoreIndices(wall_is, &points));
189     }
190     PetscCall(ISDestroy(&wall_is));
191   }
192   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, reaction_force, dim * num_walls, MPIU_SCALAR, MPI_SUM, comm));
193   //  Restore Vectors
194   PetscCall(VecRestoreArrayRead(G_loc, &g));
195 
196   PetscFunctionReturn(0);
197 }
198 
// Implicit time-stepper function setup
//
// TSIFunction callback: evaluates the implicit residual G(t, Q, Q_dot) into
// the global vector G. The local residual is cached under the name
// "ResidualLocal" so other consumers (e.g. TSMonitor_WallForce) can read it.
PetscErrorCode IFunction_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, Vec G, void *user_data) {
  User         user = *(User *)user_data;
  MPI_Comm     comm = PetscObjectComm((PetscObject)ts);
  PetscScalar  dt;
  Vec          Q_loc = user->Q_loc, Q_dot_loc = user->Q_dot_loc, G_loc;
  PetscMemType q_mem_type, q_dot_mem_type, g_mem_type;
  PetscFunctionBeginUser;

  // Get local vectors
  PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  // Update time dependent data: boundary values and any time/dt labels the
  // IFunction operator's QFunction context exposes
  PetscCall(UpdateBoundaryValues(user, Q_loc, t));
  if (user->phys->solution_time_label) PetscCall(UpdateContextLabel(comm, t, user->op_ifunction, user->phys->solution_time_label));
  PetscCall(TSGetTimeStep(ts, &dt));
  if (user->phys->timestep_size_label) PetscCall(UpdateContextLabel(comm, dt, user->op_ifunction, user->phys->timestep_size_label));

  // Global-to-local; both scatters are started before either is completed so
  // their communication can overlap
  PetscCall(DMGlobalToLocalBegin(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalBegin(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(DMGlobalToLocalEnd(user->dm, Q_dot, INSERT_VALUES, Q_dot_loc));

  // Place PETSc vectors in CEED vectors (zero-copy array handoff)
  PetscCall(VecReadP2C(Q_loc, &q_mem_type, user->q_ceed));
  PetscCall(VecReadP2C(Q_dot_loc, &q_dot_mem_type, user->q_dot_ceed));
  PetscCall(VecP2C(G_loc, &g_mem_type, user->g_ceed));

  // Apply CEED operator
  CeedOperatorApply(user->op_ifunction, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE);

  // Restore vectors (must happen before PETSc touches the arrays again)
  PetscCall(VecReadC2P(user->q_ceed, q_mem_type, Q_loc));
  PetscCall(VecReadC2P(user->q_dot_ceed, q_dot_mem_type, Q_dot_loc));
  PetscCall(VecC2P(user->g_ceed, g_mem_type, G_loc));

  // Add the data-driven subgrid-stress model contribution to the local residual
  if (user->app_ctx->sgs_model_type == SGS_MODEL_DATA_DRIVEN) {
    PetscCall(SGS_DD_ModelApplyIFunction(user, Q_loc, G_loc));
  }

  // Local-to-Global
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));

  PetscFunctionReturn(0);
}
249 
// MATSHELL callback: apply the IJacobian operator matrix-free via libCEED,
// computing G = J * Q.
static PetscErrorCode MatMult_NS_IJacobian(Mat J, Vec Q, Vec G) {
  User               user;
  const PetscScalar *q;
  PetscScalar       *g;
  PetscMemType       q_mem_type, g_mem_type;
  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(J, &user));
  Vec Q_loc = user->Q_dot_loc,  // Note - Q_dot_loc has zero BCs
      G_loc;

  // Get local vectors
  PetscCall(DMGetLocalVector(user->dm, &G_loc));

  // Global-to-local
  PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

  // Place PETSc vectors in CEED vectors; CEED_USE_POINTER avoids a copy, so
  // the CEED arrays must be taken back before the PETSc arrays are restored
  PetscCall(VecGetArrayReadAndMemType(Q_loc, &q, &q_mem_type));
  PetscCall(VecGetArrayAndMemType(G_loc, &g, &g_mem_type));
  CeedVectorSetArray(user->q_ceed, MemTypeP2C(q_mem_type), CEED_USE_POINTER, (PetscScalar *)q);
  CeedVectorSetArray(user->g_ceed, MemTypeP2C(g_mem_type), CEED_USE_POINTER, g);

  // Apply CEED operator
  CeedOperatorApply(user->op_ijacobian, user->q_ceed, user->g_ceed, CEED_REQUEST_IMMEDIATE);

  // Restore vectors
  CeedVectorTakeArray(user->q_ceed, MemTypeP2C(q_mem_type), NULL);
  CeedVectorTakeArray(user->g_ceed, MemTypeP2C(g_mem_type), NULL);
  PetscCall(VecRestoreArrayReadAndMemType(Q_loc, &q));
  PetscCall(VecRestoreArrayAndMemType(G_loc, &g));

  // Local-to-Global
  PetscCall(VecZeroEntries(G));
  PetscCall(DMLocalToGlobal(user->dm, G_loc, ADD_VALUES, G));

  // Restore vectors
  PetscCall(DMRestoreLocalVector(user->dm, &G_loc));
  PetscFunctionReturn(0);
}
289 
290 PetscErrorCode MatGetDiagonal_NS_IJacobian(Mat A, Vec D) {
291   User         user;
292   Vec          D_loc;
293   PetscScalar *d;
294   PetscMemType mem_type;
295 
296   PetscFunctionBeginUser;
297   MatShellGetContext(A, &user);
298   PetscCall(DMGetLocalVector(user->dm, &D_loc));
299   PetscCall(VecGetArrayAndMemType(D_loc, &d, &mem_type));
300   CeedVectorSetArray(user->g_ceed, MemTypeP2C(mem_type), CEED_USE_POINTER, d);
301   CeedOperatorLinearAssembleDiagonal(user->op_ijacobian, user->g_ceed, CEED_REQUEST_IMMEDIATE);
302   CeedVectorTakeArray(user->g_ceed, MemTypeP2C(mem_type), NULL);
303   PetscCall(VecRestoreArrayAndMemType(D_loc, &d));
304   PetscCall(VecZeroEntries(D));
305   PetscCall(DMLocalToGlobal(user->dm, D_loc, ADD_VALUES, D));
306   PetscCall(DMRestoreLocalVector(user->dm, &D_loc));
307   VecViewFromOptions(D, NULL, "-diag_vec_view");
308   PetscFunctionReturn(0);
309 }
310 
311 static PetscErrorCode FormPreallocation(User user, PetscBool pbdiagonal, Mat J, CeedVector *coo_values) {
312   PetscCount ncoo;
313   PetscInt  *rows, *cols;
314 
315   PetscFunctionBeginUser;
316   if (pbdiagonal) {
317     CeedSize l_size;
318     CeedOperatorGetActiveVectorLengths(user->op_ijacobian, &l_size, NULL);
319     ncoo = l_size * 5;
320     rows = malloc(ncoo * sizeof(rows[0]));
321     cols = malloc(ncoo * sizeof(cols[0]));
322     for (PetscCount n = 0; n < l_size / 5; n++) {
323       for (PetscInt i = 0; i < 5; i++) {
324         for (PetscInt j = 0; j < 5; j++) {
325           rows[(n * 5 + i) * 5 + j] = n * 5 + i;
326           cols[(n * 5 + i) * 5 + j] = n * 5 + j;
327         }
328       }
329     }
330   } else {
331     PetscCall(CeedOperatorLinearAssembleSymbolic(user->op_ijacobian, &ncoo, &rows, &cols));
332   }
333   PetscCall(MatSetPreallocationCOOLocal(J, ncoo, rows, cols));
334   free(rows);
335   free(cols);
336   CeedVectorCreate(user->ceed, ncoo, coo_values);
337   PetscFunctionReturn(0);
338 }
339 
340 static PetscErrorCode FormSetValues(User user, PetscBool pbdiagonal, Mat J, CeedVector coo_values) {
341   CeedMemType        mem_type = CEED_MEM_HOST;
342   const PetscScalar *values;
343   MatType            mat_type;
344 
345   PetscFunctionBeginUser;
346   PetscCall(MatGetType(J, &mat_type));
347   if (strstr(mat_type, "kokkos") || strstr(mat_type, "cusparse")) mem_type = CEED_MEM_DEVICE;
348   if (user->app_ctx->pmat_pbdiagonal) {
349     CeedOperatorLinearAssemblePointBlockDiagonal(user->op_ijacobian, coo_values, CEED_REQUEST_IMMEDIATE);
350   } else {
351     CeedOperatorLinearAssemble(user->op_ijacobian, coo_values);
352   }
353   CeedVectorGetArrayRead(coo_values, mem_type, &values);
354   PetscCall(MatSetValuesCOO(J, values, INSERT_VALUES));
355   CeedVectorRestoreArrayRead(coo_values, &values);
356   PetscFunctionReturn(0);
357 }
358 
// TSIJacobian callback: provide the Jacobian of IFunction_NS (per PETSc
// convention, dF/dQ + shift * dF/dQ_dot) in J and J_pre, handling shell,
// MFFD, and assembled (COO) matrix types.
PetscErrorCode FormIJacobian_NS(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *user_data) {
  User      user = *(User *)user_data;
  PetscBool J_is_shell, J_is_mffd, J_pre_is_shell;
  PetscFunctionBeginUser;
  // Pass the TS shift to the libCEED IJacobian context when the QFunction uses it
  if (user->phys->ijacobian_time_shift_label) CeedOperatorSetContextDouble(user->op_ijacobian, user->phys->ijacobian_time_shift_label, &shift);
  PetscCall(PetscObjectTypeCompare((PetscObject)J, MATMFFD, &J_is_mffd));
  PetscCall(PetscObjectTypeCompare((PetscObject)J, MATSHELL, &J_is_shell));
  PetscCall(PetscObjectTypeCompare((PetscObject)J_pre, MATSHELL, &J_pre_is_shell));
  // One-time setup: install shell callbacks and build COO preallocations
  if (!user->matrices_set_up) {
    if (J_is_shell) {
      PetscCall(MatShellSetContext(J, user));
      PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_NS_IJacobian));
      PetscCall(MatShellSetOperation(J, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_NS_IJacobian));
      PetscCall(MatSetUp(J));
    }
    if (!J_pre_is_shell) {
      PetscCall(FormPreallocation(user, user->app_ctx->pmat_pbdiagonal, J_pre, &user->coo_values_pmat));
    }
    // A separate A-matrix gets a full (not point-block diagonal) preallocation
    if (J != J_pre && !J_is_shell && !J_is_mffd) {
      PetscCall(FormPreallocation(user, PETSC_FALSE, J, &user->coo_values_amat));
    }
    user->matrices_set_up = true;
  }
  // Assemble values for the current linearization point
  if (!J_pre_is_shell) {
    PetscCall(FormSetValues(user, user->app_ctx->pmat_pbdiagonal, J_pre, user->coo_values_pmat));
  }
  if (user->coo_values_amat) {
    PetscCall(FormSetValues(user, PETSC_FALSE, J, user->coo_values_amat));
  } else if (J_is_mffd) {
    // MFFD has no values to set but must still be flagged as assembled
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(0);
}
393 
// Write simulation output: optional VTK checkpoints (plus an interpolated view
// on the refined visualization DM, when present) and a binary restart file
// containing a file token, the step number, the dimensional time, and Q.
PetscErrorCode WriteOutput(User user, Vec Q, PetscInt step_no, PetscScalar time) {
  Vec         Q_loc;
  char        file_path[PETSC_MAX_PATH_LEN];
  PetscViewer viewer;
  PetscFunctionBeginUser;

  if (user->app_ctx->checkpoint_vtk) {
    // Set up output
    PetscCall(DMGetLocalVector(user->dm, &Q_loc));
    PetscCall(PetscObjectSetName((PetscObject)Q_loc, "StateVec"));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(user->dm, Q, INSERT_VALUES, Q_loc));

    // Output
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

    PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q), file_path, FILE_MODE_WRITE, &viewer));
    PetscCall(VecView(Q_loc, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    // Also write the solution interpolated onto the refined visualization DM
    if (user->dm_viz) {
      Vec         Q_refined, Q_refined_loc;
      char        file_path_refined[PETSC_MAX_PATH_LEN];
      PetscViewer viewer_refined;

      PetscCall(DMGetGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(DMGetLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(PetscObjectSetName((PetscObject)Q_refined_loc, "Refined"));

      PetscCall(MatInterpolate(user->interp_viz, Q, Q_refined));
      PetscCall(VecZeroEntries(Q_refined_loc));
      PetscCall(DMGlobalToLocal(user->dm_viz, Q_refined, INSERT_VALUES, Q_refined_loc));

      PetscCall(
          PetscSNPrintf(file_path_refined, sizeof file_path_refined, "%s/nsrefined-%03" PetscInt_FMT ".vtu", user->app_ctx->output_dir, step_no));

      PetscCall(PetscViewerVTKOpen(PetscObjectComm((PetscObject)Q_refined), file_path_refined, FILE_MODE_WRITE, &viewer_refined));
      PetscCall(VecView(Q_refined_loc, viewer_refined));
      PetscCall(DMRestoreLocalVector(user->dm_viz, &Q_refined_loc));
      PetscCall(DMRestoreGlobalVector(user->dm_viz, &Q_refined));
      PetscCall(PetscViewerDestroy(&viewer_refined));
    }
    PetscCall(DMRestoreLocalVector(user->dm, &Q_loc));
  }

  // Save data in a binary file for continuation of simulations
  if (user->app_ctx->add_stepnum2bin) {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution-%" PetscInt_FMT ".bin", user->app_ctx->output_dir, step_no));
  } else {
    PetscCall(PetscSNPrintf(file_path, sizeof file_path, "%s/ns-solution.bin", user->app_ctx->output_dir));
  }
  PetscCall(PetscViewerBinaryOpen(user->comm, file_path, FILE_MODE_WRITE, &viewer));

  PetscInt token = FLUIDS_FILE_TOKEN;
  PetscCall(PetscViewerBinaryWrite(viewer, &token, 1, PETSC_INT));
  PetscCall(PetscViewerBinaryWrite(viewer, &step_no, 1, PETSC_INT));
  time /= user->units->second;  // Dimensionalize time back
  // NOTE(review): `time` is PetscScalar but is written with PETSC_REAL; this
  // only matches when PetscScalar == PetscReal (real builds) — confirm
  PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));
  PetscCall(VecView(Q, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(0);
}
455 
456 // CSV Monitor
457 PetscErrorCode TSMonitor_WallForce(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
458   User              user = ctx;
459   Vec               G_loc;
460   PetscInt          num_wall = user->app_ctx->wall_forces.num_wall, dim = 3;
461   const PetscInt   *walls  = user->app_ctx->wall_forces.walls;
462   PetscViewer       viewer = user->app_ctx->wall_forces.viewer;
463   PetscViewerFormat format = user->app_ctx->wall_forces.viewer_format;
464   PetscScalar      *reaction_force;
465   PetscBool         iascii;
466 
467   PetscFunctionBeginUser;
468   if (!viewer) PetscFunctionReturn(0);
469   PetscCall(DMGetNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
470   PetscCall(PetscMalloc1(num_wall * dim, &reaction_force));
471   PetscCall(Surface_Forces_NS(user->dm, G_loc, num_wall, walls, reaction_force));
472   PetscCall(DMRestoreNamedLocalVector(user->dm, "ResidualLocal", &G_loc));
473 
474   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
475 
476   if (iascii) {
477     if (format == PETSC_VIEWER_ASCII_CSV && !user->app_ctx->wall_forces.header_written) {
478       PetscCall(PetscViewerASCIIPrintf(viewer, "Step,Time,Wall,ForceX,ForceY,ForceZ\n"));
479       user->app_ctx->wall_forces.header_written = PETSC_TRUE;
480     }
481     for (PetscInt w = 0; w < num_wall; w++) {
482       PetscInt wall = walls[w];
483       if (format == PETSC_VIEWER_ASCII_CSV) {
484         PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT ",%g,%" PetscInt_FMT ",%g,%g,%g\n", step_no, time, wall,
485                                          reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
486 
487       } else {
488         PetscCall(PetscViewerASCIIPrintf(viewer, "Wall %" PetscInt_FMT " Forces: Force_x = %12g, Force_y = %12g, Force_z = %12g\n", wall,
489                                          reaction_force[w * dim + 0], reaction_force[w * dim + 1], reaction_force[w * dim + 2]));
490       }
491     }
492   }
493   PetscCall(PetscFree(reaction_force));
494   PetscFunctionReturn(0);
495 }
496 
497 // User provided TS Monitor
498 PetscErrorCode TSMonitor_NS(TS ts, PetscInt step_no, PetscReal time, Vec Q, void *ctx) {
499   User user = ctx;
500   PetscFunctionBeginUser;
501 
502   // Print every 'checkpoint_interval' steps
503   if (user->app_ctx->checkpoint_interval <= 0 || step_no % user->app_ctx->checkpoint_interval != 0 ||
504       (user->app_ctx->cont_steps == step_no && step_no != 0)) {
505     PetscFunctionReturn(0);
506   }
507 
508   PetscCall(WriteOutput(user, Q, step_no, time));
509 
510   PetscFunctionReturn(0);
511 }
512 
// TS: Create, setup, and solve
//
// Creates the time stepper (BDF for implicit physics, RK5F otherwise), wires
// the residual/Jacobian callbacks and monitors, handles continuation from a
// checkpoint, optionally pre-loads one step for profiling, then runs the
// solve. On return *Q holds the final state and *f_time the final solve time.
PetscErrorCode TSSolve_NS(DM dm, User user, AppCtx app_ctx, Physics phys, Vec *Q, PetscScalar *f_time, TS *ts) {
  MPI_Comm    comm = user->comm;
  TSAdapt     adapt;
  PetscScalar final_time;
  PetscFunctionBeginUser;

  PetscCall(TSCreate(comm, ts));
  PetscCall(TSSetDM(*ts, dm));
  if (phys->implicit) {
    PetscCall(TSSetType(*ts, TSBDF));
    if (user->op_ifunction) {
      PetscCall(TSSetIFunction(*ts, NULL, IFunction_NS, &user));
    } else {  // Implicit integrators can fall back to using an RHSFunction
      PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
    }
    if (user->op_ijacobian) {
      PetscCall(DMTSSetIJacobian(dm, FormIJacobian_NS, &user));
      // Optionally use a different matrix type for the A-matrix than the
      // preconditioning matrix
      if (app_ctx->amat_type) {
        Mat Pmat, Amat;
        PetscCall(DMCreateMatrix(dm, &Pmat));
        PetscCall(DMSetMatType(dm, app_ctx->amat_type));
        PetscCall(DMCreateMatrix(dm, &Amat));
        PetscCall(TSSetIJacobian(*ts, Amat, Pmat, NULL, NULL));
        // TS holds references; drop ours
        PetscCall(MatDestroy(&Amat));
        PetscCall(MatDestroy(&Pmat));
      }
    }
  } else {
    PetscCheck(user->op_rhs, comm, PETSC_ERR_ARG_NULL, "Problem does not provide RHSFunction");
    PetscCall(TSSetType(*ts, TSRK));
    PetscCall(TSRKSetType(*ts, TSRK5F));
    PetscCall(TSSetRHSFunction(*ts, NULL, RHS_NS, &user));
  }
  // Defaults below (max time, dt, adapt limits) are in scaled units and may be
  // overridden from the options database via TSSetFromOptions
  PetscCall(TSSetMaxTime(*ts, 500. * user->units->second));
  PetscCall(TSSetExactFinalTime(*ts, TS_EXACTFINALTIME_STEPOVER));
  if (app_ctx->test_type == TESTTYPE_NONE) PetscCall(TSSetErrorIfStepFails(*ts, PETSC_FALSE));
  PetscCall(TSSetTimeStep(*ts, 1.e-2 * user->units->second));
  if (app_ctx->test_type != TESTTYPE_NONE) {
    PetscCall(TSSetMaxSteps(*ts, 10));
  }
  PetscCall(TSGetAdapt(*ts, &adapt));
  PetscCall(TSAdaptSetStepLimits(adapt, 1.e-12 * user->units->second, 1.e2 * user->units->second));
  PetscCall(TSSetFromOptions(*ts));
  user->time_bc_set = -1.0;    // require all BCs be updated
  if (!app_ctx->cont_steps) {  // print initial condition
    if (app_ctx->test_type == TESTTYPE_NONE) {
      PetscCall(TSMonitor_NS(*ts, 0, 0., *Q, user));
    }
  } else {  // continue from time of last output
    PetscInt    count;
    PetscViewer viewer;

    if (app_ctx->cont_time <= 0) {  // Legacy files did not include step number and time
      PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_time_file, FILE_MODE_READ, &viewer));
      PetscCall(PetscViewerBinaryRead(viewer, &app_ctx->cont_time, 1, &count, PETSC_REAL));
      PetscCall(PetscViewerDestroy(&viewer));
      PetscCheck(app_ctx->cont_steps != -1, comm, PETSC_ERR_ARG_INCOMP,
                 "-continue step number not specified, but checkpoint file does not contain a step number (likely written by older code version)");
    }
    PetscCall(TSSetTime(*ts, app_ctx->cont_time * user->units->second));
    PetscCall(TSSetStepNumber(*ts, app_ctx->cont_steps));
  }
  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_NS, user, NULL));
  }
  if (app_ctx->wall_forces.viewer) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_WallForce, user, NULL));
  }
  if (app_ctx->turb_spanstats_enable) {
    PetscCall(TSMonitorSet(*ts, TSMonitor_Statistics, user, NULL));
    // Seed the statistics collection operator with the restart time
    CeedScalar previous_time = app_ctx->cont_time * user->units->second;
    CeedOperatorSetContextDouble(user->spanstats.op_stats_collect, user->spanstats.previous_time_label, &previous_time);
  }

  // Solve
  // Remember the starting point so the (optional) preload pass can be rewound
  PetscReal start_time;
  PetscInt  start_step;
  PetscCall(TSGetTime(*ts, &start_time));
  PetscCall(TSGetStepNumber(*ts, &start_step));

  PetscPreLoadBegin(PETSC_FALSE, "Fluids Solve");
  PetscCall(TSSetTime(*ts, start_time));
  PetscCall(TSSetStepNumber(*ts, start_step));
  if (PetscPreLoadingOn) {
    // LCOV_EXCL_START
    // Preload: take a single cheap step (loosened SNES rtol) on a copy of the
    // state so subsequent profiling excludes one-time setup costs
    SNES      snes;
    Vec       Q_preload;
    PetscReal rtol;
    PetscCall(VecDuplicate(*Q, &Q_preload));
    PetscCall(VecCopy(*Q, Q_preload));
    PetscCall(TSGetSNES(*ts, &snes));
    PetscCall(SNESGetTolerances(snes, NULL, &rtol, NULL, NULL, NULL));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, .99, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(TSSetSolution(*ts, *Q));
    PetscCall(TSStep(*ts));
    PetscCall(SNESSetTolerances(snes, PETSC_DEFAULT, rtol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    PetscCall(VecDestroy(&Q_preload));
    // LCOV_EXCL_STOP
  } else {
    // Synchronize ranks so the logged solve stage starts together
    PetscCall(PetscBarrier((PetscObject)*ts));
    PetscCall(TSSolve(*ts, *Q));
  }
  PetscPreLoadEnd();

  PetscCall(TSGetSolveTime(*ts, &final_time));
  *f_time = final_time;

  if (app_ctx->test_type == TESTTYPE_NONE) {
    PetscInt step_no;
    PetscCall(TSGetStepNumber(*ts, &step_no));
    // Write a final checkpoint unless checkpointing is disabled (interval 0)
    if (user->app_ctx->checkpoint_interval > 0 || user->app_ctx->checkpoint_interval == -1) {
      PetscCall(WriteOutput(user, *Q, step_no, final_time));
    }

    // Report wall-clock time spent in the "Fluids Solve" log stage
    PetscLogEvent stage_id;
    PetscStageLog stage_log;

    PetscCall(PetscLogStageGetId("Fluids Solve", &stage_id));
    PetscCall(PetscLogGetStageLog(&stage_log));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time taken for solution (sec): %g\n", stage_log->stageInfo[stage_id].perfInfo.time));
  }
  PetscFunctionReturn(0);
}
637