xref: /honee/src/misc.c (revision 68d8e747b442853c5d1963db03f2a777eea711bf)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Miscellaneous utility functions
10 
11 #include <ceed.h>
12 #include <petscdm.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/mass.h"
17 
18 PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
19   PetscFunctionBeginUser;
20 
21   // ---------------------------------------------------------------------------
22   // Update time for evaluation
23   // ---------------------------------------------------------------------------
24   if (user->phys->ics_time_label) CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time);
25 
26   // ---------------------------------------------------------------------------
27   // ICs
28   // ---------------------------------------------------------------------------
29   // -- CEED Restriction
30   CeedVector q0_ceed;
31   CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &q0_ceed, NULL);
32 
33   // -- Place PETSc vector in CEED vector
34   PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));
35 
36   // ---------------------------------------------------------------------------
37   // Fix multiplicity for output of ICs
38   // ---------------------------------------------------------------------------
39   // -- CEED Restriction
40   CeedVector mult_vec;
41   CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL);
42 
43   // -- Place PETSc vector in CEED vector
44   PetscMemType m_mem_type;
45   Vec          multiplicity_loc;
46   PetscCall(DMGetLocalVector(dm, &multiplicity_loc));
47   PetscCall(VecP2C(multiplicity_loc, &m_mem_type, mult_vec));
48 
49   // -- Get multiplicity
50   CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec);
51 
52   // -- Restore vectors
53   PetscCall(VecC2P(mult_vec, m_mem_type, multiplicity_loc));
54 
55   // -- Local-to-Global
56   Vec multiplicity;
57   PetscCall(DMGetGlobalVector(dm, &multiplicity));
58   PetscCall(VecZeroEntries(multiplicity));
59   PetscCall(DMLocalToGlobal(dm, multiplicity_loc, ADD_VALUES, multiplicity));
60 
61   // -- Fix multiplicity
62   PetscCall(VecPointwiseDivide(Q, Q, multiplicity));
63   PetscCall(VecPointwiseDivide(Q_loc, Q_loc, multiplicity_loc));
64 
65   // -- Restore vectors
66   PetscCall(DMRestoreLocalVector(dm, &multiplicity_loc));
67   PetscCall(DMRestoreGlobalVector(dm, &multiplicity));
68 
69   // Cleanup
70   CeedVectorDestroy(&mult_vec);
71   CeedVectorDestroy(&q0_ceed);
72 
73   PetscFunctionReturn(0);
74 }
75 
76 PetscErrorCode DMPlexInsertBoundaryValues_NS(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
77                                              Vec grad_FVM) {
78   Vec Qbc, boundary_mask;
79   PetscFunctionBegin;
80 
81   // Mask (zero) Strong BC entries
82   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
83   PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
84   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
85 
86   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
87   PetscCall(VecAXPY(Q_loc, 1., Qbc));
88   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
89 
90   PetscFunctionReturn(0);
91 }
92 
93 // @brief Load vector from binary file, possibly with embedded solution time and step number
94 PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
95   PetscInt  token, file_step_number;
96   PetscReal file_time;
97   PetscFunctionBeginUser;
98 
99   // Attempt
100   PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT));
101   if (token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
102     PetscCall(PetscViewerBinaryRead(viewer, &file_step_number, 1, NULL, PETSC_INT));
103     PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
104     if (time) *time = file_time;
105     if (step_number) *step_number = file_step_number;
106   } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, ]
107     PetscInt length, N;
108     PetscCall(PetscViewerBinaryRead(viewer, &length, 1, NULL, PETSC_INT));
109     PetscCall(VecGetSize(Q, &N));
110     PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
111     PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
112   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");
113 
114   // Load Q from existent solution
115   PetscCall(VecLoad(Q, viewer));
116 
117   PetscFunctionReturn(0);
118 }
119 
120 // Compare reference solution values with current test run for CI
121 PetscErrorCode RegressionTests_NS(AppCtx app_ctx, Vec Q) {
122   Vec         Qref;
123   PetscViewer viewer;
124   PetscReal   error, Qrefnorm;
125   MPI_Comm    comm = PetscObjectComm((PetscObject)Q);
126   PetscFunctionBegin;
127 
128   // Read reference file
129   PetscCall(VecDuplicate(Q, &Qref));
130   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
131   PetscCall(LoadFluidsBinaryVec(comm, viewer, Qref, NULL, NULL));
132 
133   // Compute error with respect to reference solution
134   PetscCall(VecAXPY(Q, -1.0, Qref));
135   PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
136   PetscCall(VecScale(Q, 1. / Qrefnorm));
137   PetscCall(VecNorm(Q, NORM_MAX, &error));
138 
139   // Check error
140   if (error > app_ctx->test_tol) {
141     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
142   }
143 
144   // Cleanup
145   PetscCall(PetscViewerDestroy(&viewer));
146   PetscCall(VecDestroy(&Qref));
147 
148   PetscFunctionReturn(0);
149 }
150 
151 // Get error for problems with exact solutions
152 PetscErrorCode GetError_NS(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
153   PetscInt  loc_nodes;
154   Vec       Q_exact, Q_exact_loc;
155   PetscReal rel_error, norm_error, norm_exact;
156   PetscFunctionBegin;
157 
158   // Get exact solution at final time
159   PetscCall(DMCreateGlobalVector(dm, &Q_exact));
160   PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
161   PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
162   PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));
163 
164   // Get |exact solution - obtained solution|
165   PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
166   PetscCall(VecAXPY(Q, -1.0, Q_exact));
167   PetscCall(VecNorm(Q, NORM_1, &norm_error));
168 
169   // Compute relative error
170   rel_error = norm_error / norm_exact;
171 
172   // Output relative error
173   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
174   // Cleanup
175   PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
176   PetscCall(VecDestroy(&Q_exact));
177 
178   PetscFunctionReturn(0);
179 }
180 
181 // Post-processing
182 PetscErrorCode PostProcess_NS(TS ts, CeedData ceed_data, DM dm, ProblemData *problem, User user, Vec Q, PetscScalar final_time) {
183   PetscInt          steps;
184   TSConvergedReason reason;
185   PetscFunctionBegin;
186 
187   // Print relative error
188   if (problem->non_zero_time && user->app_ctx->test_type == TESTTYPE_NONE) {
189     PetscCall(GetError_NS(ceed_data, dm, user, Q, final_time));
190   }
191 
192   // Print final time and number of steps
193   PetscCall(TSGetStepNumber(ts, &steps));
194   PetscCall(TSGetConvergedReason(ts, &reason));
195   if (user->app_ctx->test_type == TESTTYPE_NONE) {
196     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
197                           steps, (double)final_time));
198   }
199 
200   // Output numerical values from command line
201   PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));
202 
203   // Compare reference solution values with current test run for CI
204   if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
205     PetscCall(RegressionTests_NS(user->app_ctx, Q));
206   }
207 
208   PetscFunctionReturn(0);
209 }
210 
// Magic header token marking the "new style" fluids binary checkpoint format,
// in which the step number and solution time precede the Vec data
// (read in LoadFluidsBinaryVec, written by the checkpointing code).
const PetscInt FLUIDS_FILE_TOKEN = 0xceedf00;
212 
213 // Gather initial Q values in case of continuation of simulation
214 PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
215   PetscViewer viewer;
216 
217   PetscFunctionBegin;
218 
219   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
220   PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
221   PetscCall(PetscViewerDestroy(&viewer));
222 
223   PetscFunctionReturn(0);
224 }
225 
226 // Record boundary values from initial condition
227 PetscErrorCode SetBCsFromICs_NS(DM dm, Vec Q, Vec Q_loc) {
228   Vec Qbc, boundary_mask;
229   PetscFunctionBegin;
230 
231   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
232   PetscCall(VecCopy(Q_loc, Qbc));
233   PetscCall(VecZeroEntries(Q_loc));
234   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
235   PetscCall(VecAXPY(Qbc, -1., Q_loc));
236   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
237   PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_NS));
238 
239   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
240   PetscCall(DMGetGlobalVector(dm, &Q));
241   PetscCall(VecZeroEntries(boundary_mask));
242   PetscCall(VecSet(Q, 1.0));
243   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, boundary_mask));
244   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
245 
246   PetscFunctionReturn(0);
247 }
248 
249 // Free a plain data context that was allocated using PETSc; returning libCEED error codes
250 int FreeContextPetsc(void *data) {
251   if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
252   return CEED_ERROR_SUCCESS;
253 }
254 
255 // Return mass qfunction specification for number of components N
256 PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
257   CeedQFunctionUser qfunction_ptr;
258   const char       *qfunction_loc;
259   PetscFunctionBeginUser;
260 
261   switch (N) {
262     case 1:
263       qfunction_ptr = Mass_1;
264       qfunction_loc = Mass_1_loc;
265       break;
266     case 5:
267       qfunction_ptr = Mass_5;
268       qfunction_loc = Mass_5_loc;
269       break;
270     case 7:
271       qfunction_ptr = Mass_7;
272       qfunction_loc = Mass_7_loc;
273       break;
274     case 9:
275       qfunction_ptr = Mass_9;
276       qfunction_loc = Mass_9_loc;
277       break;
278     case 22:
279       qfunction_ptr = Mass_22;
280       qfunction_loc = Mass_22_loc;
281       break;
282     default:
283       SETERRQ(PETSC_COMM_WORLD, -1, "Could not find mass qfunction of size %d", N);
284   }
285   CeedQFunctionCreateInterior(ceed, 1, qfunction_ptr, qfunction_loc, qf);
286 
287   CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP);
288   CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE);
289   CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP);
290   PetscFunctionReturn(0);
291 }
292 
293 /* @brief L^2 Projection of a source FEM function to a target FEM space
294  *
295  * To solve system using a lumped mass matrix, pass a KSP object with ksp_type=preonly, pc_type=jacobi, pc_jacobi_type=rowsum.
296  *
297  * @param[in]  source_vec    Global Vec of the source FEM function. NULL indicates using rhs_matop_ctx->X_loc
298  * @param[out] target_vec    Global Vec of the target (result) FEM function. NULL indicates using rhs_matop_ctx->Y_loc
299  * @param[in]  rhs_matop_ctx MatopApplyContext for performing the RHS evaluation
300  * @param[in]  ksp           KSP for solving the consistent projection problem
301  */
302 PetscErrorCode ComputeL2Projection(Vec source_vec, Vec target_vec, OperatorApplyContext rhs_matop_ctx, KSP ksp) {
303   PetscFunctionBeginUser;
304 
305   PetscCall(ApplyCeedOperatorGlobalToGlobal(source_vec, target_vec, rhs_matop_ctx));
306   PetscCall(KSPSolve(ksp, target_vec, target_vec));
307 
308   PetscFunctionReturn(0);
309 }
310 
311 PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
312   PetscFunctionBeginUser;
313   if (context == NULL) PetscFunctionReturn(0);
314 
315   PetscCall(DMDestroy(&context->dm));
316   PetscCall(KSPDestroy(&context->ksp));
317 
318   PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));
319 
320   PetscCall(PetscFree(context));
321 
322   PetscFunctionReturn(0);
323 }
324 
325 /*
326  * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
327  *
328  * This function opens the file specified by `path` using `PetscFOpen` and passes the file pointer in `fp`.
329  * It is not closed in this function, thus `fp` must be closed sometime after this function has been called (using `PetscFClose` for example).
330  *
331  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
332  *
333  * @param[in]  comm           MPI_Comm for the program
334  * @param[in]  path           Path to the file
335  * @param[in]  char_array_len Length of the character array that should contain each line
336  * @param[out] dims           Dimensions of the file, taken from the first line of the file
337  * @param[out] fp File        pointer to the opened file
338  */
339 PetscErrorCode PHASTADatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
340                                  FILE **fp) {
341   PetscInt ndims;
342   char     line[char_array_len];
343   char   **array;
344 
345   PetscFunctionBeginUser;
346   PetscCall(PetscFOpen(comm, path, "r", fp));
347   PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
348   PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
349   PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %" PetscInt_FMT " dimensions instead of 2 on the first line of %s", ndims, path);
350 
351   for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
352   PetscCall(PetscStrToArrayDestroy(ndims, array));
353 
354   PetscFunctionReturn(0);
355 }
356 
357 /*
358  * @brief Get the number of rows for the PHASTA file at path.
359  *
360  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
361  *
362  * @param[in]  comm  MPI_Comm for the program
363  * @param[in]  path  Path to the file
364  * @param[out] nrows Number of rows
365  */
366 PetscErrorCode PHASTADatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
367   const PetscInt char_array_len = 512;
368   PetscInt       dims[2];
369   FILE          *fp;
370 
371   PetscFunctionBeginUser;
372   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
373   *nrows = dims[0];
374   PetscCall(PetscFClose(comm, fp));
375 
376   PetscFunctionReturn(0);
377 }
378 
379 PetscErrorCode PHASTADatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
380   PetscInt       ndims, dims[2];
381   FILE          *fp;
382   const PetscInt char_array_len = 512;
383   char           line[char_array_len];
384   char         **row_array;
385   PetscFunctionBeginUser;
386 
387   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
388 
389   for (PetscInt i = 0; i < dims[0]; i++) {
390     PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
391     PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
392     PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
393                "Line %" PetscInt_FMT " of %s does not contain enough columns (%" PetscInt_FMT " instead of %" PetscInt_FMT ")", i, path, ndims,
394                dims[1]);
395 
396     for (PetscInt j = 0; j < dims[1]; j++) {
397       array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
398     }
399   }
400 
401   PetscCall(PetscFClose(comm, fp));
402 
403   PetscFunctionReturn(0);
404 }
405