xref: /honee/src/misc.c (revision 676080b4cb0b4c593f7c39e26e1c02044ea4890e)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Miscellaneous utility functions
10 
11 #include <ceed.h>
12 #include <petscdm.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/mass.h"
17 
/**
 * @brief Compute initial conditions and correct for node multiplicity
 *
 * Applies the ICs CEED operator (evaluated at the mesh coordinates) into the
 * local vector, accumulates into the global vector, then divides both by the
 * element-restriction multiplicity so that DoFs shared between elements end
 * up averaged rather than summed.
 *
 * @param[in]     dm        DM for the solution vectors
 * @param[in]     ceed_data CeedData holding op_ics, elem_restr_q, and x_coord
 * @param[in]     user      User context (provides ics_time_label, if set)
 * @param[in,out] Q_loc     Local solution vector; overwritten with the ICs
 * @param[out]    Q         Global solution vector; overwritten with the ICs
 * @param[in]     time      Evaluation time for time-dependent ICs
 */
PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
  PetscFunctionBeginUser;

  // ---------------------------------------------------------------------------
  // Update time for evaluation
  // ---------------------------------------------------------------------------
  // Only problems with time-dependent ICs register a time label on the operator
  if (user->phys->ics_time_label) CeedOperatorSetContextDouble(ceed_data->op_ics, user->phys->ics_time_label, &time);

  // ---------------------------------------------------------------------------
  // ICs
  // ---------------------------------------------------------------------------
  // -- CEED Restriction
  CeedVector q0_ceed;
  CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &q0_ceed, NULL);

  // -- Place PETSc vector in CEED vector
  // NOTE(review): VecP2C appears to lend the PETSc array to the CEED vector
  // (presumably zero-copy); it must be paired with VecC2P below — confirm in helper
  PetscMemType q0_mem_type;
  PetscCall(VecP2C(Q_loc, &q0_mem_type, q0_ceed));

  // -- Apply CEED Operator
  CeedOperatorApply(ceed_data->op_ics, ceed_data->x_coord, q0_ceed, CEED_REQUEST_IMMEDIATE);

  // -- Restore vectors
  PetscCall(VecC2P(q0_ceed, q0_mem_type, Q_loc));

  // -- Local-to-Global
  // ADD_VALUES sums contributions from all elements touching a shared DoF;
  // the multiplicity division below undoes that overcounting
  PetscCall(VecZeroEntries(Q));
  PetscCall(DMLocalToGlobal(dm, Q_loc, ADD_VALUES, Q));

  // ---------------------------------------------------------------------------
  // Fix multiplicity for output of ICs
  // ---------------------------------------------------------------------------
  // -- CEED Restriction
  CeedVector mult_vec;
  CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL);

  // -- Place PETSc vector in CEED vector
  PetscMemType m_mem_type;
  Vec          multiplicity_loc;
  PetscCall(DMGetLocalVector(dm, &multiplicity_loc));
  PetscCall(VecP2C(multiplicity_loc, &m_mem_type, mult_vec));

  // -- Get multiplicity
  // Counts how many elements contribute to each local DoF
  CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec);

  // -- Restore vectors
  PetscCall(VecC2P(mult_vec, m_mem_type, multiplicity_loc));

  // -- Local-to-Global
  Vec multiplicity;
  PetscCall(DMGetGlobalVector(dm, &multiplicity));
  PetscCall(VecZeroEntries(multiplicity));
  PetscCall(DMLocalToGlobal(dm, multiplicity_loc, ADD_VALUES, multiplicity));

  // -- Fix multiplicity
  // Divide both global and local vectors so they stay consistent for callers
  PetscCall(VecPointwiseDivide(Q, Q, multiplicity));
  PetscCall(VecPointwiseDivide(Q_loc, Q_loc, multiplicity_loc));

  // -- Restore vectors
  PetscCall(DMRestoreLocalVector(dm, &multiplicity_loc));
  PetscCall(DMRestoreGlobalVector(dm, &multiplicity));

  // Cleanup
  CeedVectorDestroy(&mult_vec);
  CeedVectorDestroy(&q0_ceed);

  PetscFunctionReturn(0);
}
86 
87 PetscErrorCode DMPlexInsertBoundaryValues_NS(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
88                                              Vec grad_FVM) {
89   Vec Qbc, boundary_mask;
90   PetscFunctionBegin;
91 
92   // Mask (zero) Dirichlet entries
93   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
94   PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
95   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
96 
97   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
98   PetscCall(VecAXPY(Q_loc, 1., Qbc));
99   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
100 
101   PetscFunctionReturn(0);
102 }
103 
104 // @brief Load vector from binary file, possibly with embedded solution time and step number
105 PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
106   PetscInt  token, file_step_number;
107   PetscReal file_time;
108   PetscFunctionBeginUser;
109 
110   // Attempt
111   PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT));
112   if (token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
113     PetscCall(PetscViewerBinaryRead(viewer, &file_step_number, 1, NULL, PETSC_INT));
114     PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
115     if (time) *time = file_time;
116     if (step_number) *step_number = file_step_number;
117   } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, ]
118     PetscInt length, N;
119     PetscCall(PetscViewerBinaryRead(viewer, &length, 1, NULL, PETSC_INT));
120     PetscCall(VecGetSize(Q, &N));
121     PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
122     PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
123   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");
124 
125   // Load Q from existent solution
126   PetscCall(VecLoad(Q, viewer));
127 
128   PetscFunctionReturn(0);
129 }
130 
131 // Compare reference solution values with current test run for CI
132 PetscErrorCode RegressionTests_NS(AppCtx app_ctx, Vec Q) {
133   Vec         Qref;
134   PetscViewer viewer;
135   PetscReal   error, Qrefnorm;
136   MPI_Comm    comm = PetscObjectComm((PetscObject)Q);
137   PetscFunctionBegin;
138 
139   // Read reference file
140   PetscCall(VecDuplicate(Q, &Qref));
141   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
142   PetscCall(LoadFluidsBinaryVec(comm, viewer, Qref, NULL, NULL));
143 
144   // Compute error with respect to reference solution
145   PetscCall(VecAXPY(Q, -1.0, Qref));
146   PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
147   PetscCall(VecScale(Q, 1. / Qrefnorm));
148   PetscCall(VecNorm(Q, NORM_MAX, &error));
149 
150   // Check error
151   if (error > app_ctx->test_tol) {
152     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
153   }
154 
155   // Cleanup
156   PetscCall(PetscViewerDestroy(&viewer));
157   PetscCall(VecDestroy(&Qref));
158 
159   PetscFunctionReturn(0);
160 }
161 
162 // Get error for problems with exact solutions
163 PetscErrorCode GetError_NS(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
164   PetscInt  loc_nodes;
165   Vec       Q_exact, Q_exact_loc;
166   PetscReal rel_error, norm_error, norm_exact;
167   PetscFunctionBegin;
168 
169   // Get exact solution at final time
170   PetscCall(DMCreateGlobalVector(dm, &Q_exact));
171   PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
172   PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
173   PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));
174 
175   // Get |exact solution - obtained solution|
176   PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
177   PetscCall(VecAXPY(Q, -1.0, Q_exact));
178   PetscCall(VecNorm(Q, NORM_1, &norm_error));
179 
180   // Compute relative error
181   rel_error = norm_error / norm_exact;
182 
183   // Output relative error
184   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
185   // Cleanup
186   PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
187   PetscCall(VecDestroy(&Q_exact));
188 
189   PetscFunctionReturn(0);
190 }
191 
192 // Post-processing
193 PetscErrorCode PostProcess_NS(TS ts, CeedData ceed_data, DM dm, ProblemData *problem, User user, Vec Q, PetscScalar final_time) {
194   PetscInt          steps;
195   TSConvergedReason reason;
196   PetscFunctionBegin;
197 
198   // Print relative error
199   if (problem->non_zero_time && user->app_ctx->test_type == TESTTYPE_NONE) {
200     PetscCall(GetError_NS(ceed_data, dm, user, Q, final_time));
201   }
202 
203   // Print final time and number of steps
204   PetscCall(TSGetStepNumber(ts, &steps));
205   PetscCall(TSGetConvergedReason(ts, &reason));
206   if (user->app_ctx->test_type == TESTTYPE_NONE) {
207     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
208                           steps, (double)final_time));
209   }
210 
211   // Output numerical values from command line
212   PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));
213 
214   // Compare reference solution values with current test run for CI
215   if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
216     PetscCall(RegressionTests_NS(user->app_ctx, Q));
217   }
218 
219   PetscFunctionReturn(0);
220 }
221 
222 const PetscInt FLUIDS_FILE_TOKEN = 0xceedf00;
223 
224 // Gather initial Q values in case of continuation of simulation
225 PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
226   PetscViewer viewer;
227 
228   PetscFunctionBegin;
229 
230   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
231   PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
232   PetscCall(PetscViewerDestroy(&viewer));
233 
234   PetscFunctionReturn(0);
235 }
236 
237 // Record boundary values from initial condition
238 PetscErrorCode SetBCsFromICs_NS(DM dm, Vec Q, Vec Q_loc) {
239   Vec Qbc, boundary_mask;
240   PetscFunctionBegin;
241 
242   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
243   PetscCall(VecCopy(Q_loc, Qbc));
244   PetscCall(VecZeroEntries(Q_loc));
245   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
246   PetscCall(VecAXPY(Qbc, -1., Q_loc));
247   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
248   PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_NS));
249 
250   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
251   PetscCall(DMGetGlobalVector(dm, &Q));
252   PetscCall(VecZeroEntries(boundary_mask));
253   PetscCall(VecSet(Q, 1.0));
254   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, boundary_mask));
255   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
256 
257   PetscFunctionReturn(0);
258 }
259 
260 // Free a plain data context that was allocated using PETSc; returning libCEED error codes
261 int FreeContextPetsc(void *data) {
262   if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
263   return CEED_ERROR_SUCCESS;
264 }
265 
266 // Return mass qfunction specification for number of components N
267 PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
268   CeedQFunctionUser qfunction_ptr;
269   const char       *qfunction_loc;
270   PetscFunctionBeginUser;
271 
272   switch (N) {
273     case 1:
274       qfunction_ptr = Mass_1;
275       qfunction_loc = Mass_1_loc;
276       break;
277     case 5:
278       qfunction_ptr = Mass_5;
279       qfunction_loc = Mass_5_loc;
280       break;
281     case 9:
282       qfunction_ptr = Mass_9;
283       qfunction_loc = Mass_9_loc;
284       break;
285     case 22:
286       qfunction_ptr = Mass_22;
287       qfunction_loc = Mass_22_loc;
288       break;
289     default:
290       SETERRQ(PETSC_COMM_WORLD, -1, "Could not find mass qfunction of size %d", N);
291   }
292   CeedQFunctionCreateInterior(ceed, 1, qfunction_ptr, qfunction_loc, qf);
293 
294   CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP);
295   CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE);
296   CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP);
297   PetscFunctionReturn(0);
298 }
299 
300 /* @brief L^2 Projection of a source FEM function to a target FEM space
301  *
302  * To solve system using a lumped mass matrix, pass a KSP object with ksp_type=preonly, pc_type=jacobi, pc_jacobi_type=rowsum.
303  *
304  * @param[in]  source_vec    Global Vec of the source FEM function. NULL indicates using rhs_matop_ctx->X_loc
305  * @param[out] target_vec    Global Vec of the target (result) FEM function. NULL indicates using rhs_matop_ctx->Y_loc
306  * @param[in]  rhs_matop_ctx MatopApplyContext for performing the RHS evaluation
307  * @param[in]  ksp           KSP for solving the consistent projection problem
308  */
309 PetscErrorCode ComputeL2Projection(Vec source_vec, Vec target_vec, OperatorApplyContext rhs_matop_ctx, KSP ksp) {
310   PetscFunctionBeginUser;
311 
312   PetscCall(ApplyCeedOperatorGlobalToGlobal(source_vec, target_vec, rhs_matop_ctx));
313   PetscCall(KSPSolve(ksp, target_vec, target_vec));
314 
315   PetscFunctionReturn(0);
316 }
317 
318 /*
319  * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
320  *
321  * This function opens the file specified by `path` using `PetscFOpen` and passes the file pointer in `fp`.
322  * It is not closed in this function, thus `fp` must be closed sometime after this function has been called (using `PetscFClose` for example).
323  *
324  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
325  *
326  * @param[in]  comm           MPI_Comm for the program
327  * @param[in]  path           Path to the file
328  * @param[in]  char_array_len Length of the character array that should contain each line
329  * @param[out] dims           Dimensions of the file, taken from the first line of the file
330  * @param[out] fp File        pointer to the opened file
331  */
332 PetscErrorCode PHASTADatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
333                                  FILE **fp) {
334   PetscInt ndims;
335   char     line[char_array_len];
336   char   **array;
337 
338   PetscFunctionBeginUser;
339   PetscCall(PetscFOpen(comm, path, "r", fp));
340   PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
341   PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
342   if (ndims != 2) {
343     SETERRQ(comm, -1, "Found %" PetscInt_FMT " dimensions instead of 2 on the first line of %s", ndims, path);
344   }
345 
346   for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
347   PetscCall(PetscStrToArrayDestroy(ndims, array));
348 
349   PetscFunctionReturn(0);
350 }
351 
352 /*
353  * @brief Get the number of rows for the PHASTA file at path.
354  *
355  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
356  *
357  * @param[in]  comm  MPI_Comm for the program
358  * @param[in]  path  Path to the file
359  * @param[out] nrows Number of rows
360  */
361 PetscErrorCode PHASTADatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
362   const PetscInt char_array_len = 512;
363   PetscInt       dims[2];
364   FILE          *fp;
365 
366   PetscFunctionBeginUser;
367   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
368   *nrows = dims[0];
369   PetscCall(PetscFClose(comm, fp));
370 
371   PetscFunctionReturn(0);
372 }
373