xref: /libCEED/examples/fluids/src/misc.c (revision 3f89fbfd92bafea25ba9cff7dff42016edddf2b5)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Miscellaneous utility functions
10 
#include <ceed.h>
#include <petscdm.h>
#include <petscts.h>
#include <stdlib.h>

#include "../navierstokes.h"
#include "../qfunctions/mass.h"
17 
18 PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
19   PetscFunctionBeginUser;
20 
21   // ---------------------------------------------------------------------------
22   // Update time for evaluation
23   // ---------------------------------------------------------------------------
24   if (user->phys->ics_time_label) CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time);
25 
26   // ---------------------------------------------------------------------------
27   // ICs
28   // ---------------------------------------------------------------------------
29   // -- CEED Restriction
30   CeedVector q0_ceed;
31   CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &q0_ceed, NULL);
32 
33   // -- Place PETSc vector in CEED vector
34   PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));
35 
36   // ---------------------------------------------------------------------------
37   // Fix multiplicity for output of ICs
38   // ---------------------------------------------------------------------------
39   // -- CEED Restriction
40   CeedVector mult_vec;
41   CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL);
42 
43   // -- Place PETSc vector in CEED vector
44   PetscMemType m_mem_type;
45   Vec          multiplicity_loc;
46   PetscCall(DMGetLocalVector(dm, &multiplicity_loc));
47   PetscCall(VecP2C(multiplicity_loc, &m_mem_type, mult_vec));
48 
49   // -- Get multiplicity
50   CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec);
51 
52   // -- Restore vectors
53   PetscCall(VecC2P(mult_vec, m_mem_type, multiplicity_loc));
54 
55   // -- Local-to-Global
56   Vec multiplicity;
57   PetscCall(DMGetGlobalVector(dm, &multiplicity));
58   PetscCall(VecZeroEntries(multiplicity));
59   PetscCall(DMLocalToGlobal(dm, multiplicity_loc, ADD_VALUES, multiplicity));
60 
61   // -- Fix multiplicity
62   PetscCall(VecPointwiseDivide(Q, Q, multiplicity));
63   PetscCall(VecPointwiseDivide(Q_loc, Q_loc, multiplicity_loc));
64 
65   // -- Restore vectors
66   PetscCall(DMRestoreLocalVector(dm, &multiplicity_loc));
67   PetscCall(DMRestoreGlobalVector(dm, &multiplicity));
68 
69   // Cleanup
70   CeedVectorDestroy(&mult_vec);
71   CeedVectorDestroy(&q0_ceed);
72 
73   PetscFunctionReturn(0);
74 }
75 
76 PetscErrorCode DMPlexInsertBoundaryValues_NS(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
77                                              Vec grad_FVM) {
78   Vec Qbc, boundary_mask;
79   PetscFunctionBegin;
80 
81   // Mask (zero) Strong BC entries
82   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
83   PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
84   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
85 
86   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
87   PetscCall(VecAXPY(Q_loc, 1., Qbc));
88   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
89 
90   PetscFunctionReturn(0);
91 }
92 
93 // @brief Load vector from binary file, possibly with embedded solution time and step number
94 PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
95   PetscInt  token, file_step_number;
96   PetscReal file_time;
97   PetscFunctionBeginUser;
98 
99   // Attempt
100   PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT));
101   if (token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
102     PetscCall(PetscViewerBinaryRead(viewer, &file_step_number, 1, NULL, PETSC_INT));
103     PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
104     if (time) *time = file_time;
105     if (step_number) *step_number = file_step_number;
106   } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, ]
107     PetscInt length, N;
108     PetscCall(PetscViewerBinaryRead(viewer, &length, 1, NULL, PETSC_INT));
109     PetscCall(VecGetSize(Q, &N));
110     PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
111     PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
112   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");
113 
114   // Load Q from existent solution
115   PetscCall(VecLoad(Q, viewer));
116 
117   PetscFunctionReturn(0);
118 }
119 
120 // Compare reference solution values with current test run for CI
121 PetscErrorCode RegressionTests_NS(AppCtx app_ctx, Vec Q) {
122   Vec         Qref;
123   PetscViewer viewer;
124   PetscReal   error, Qrefnorm;
125   MPI_Comm    comm = PetscObjectComm((PetscObject)Q);
126   PetscFunctionBegin;
127 
128   // Read reference file
129   PetscCall(VecDuplicate(Q, &Qref));
130   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
131   PetscCall(LoadFluidsBinaryVec(comm, viewer, Qref, NULL, NULL));
132 
133   // Compute error with respect to reference solution
134   PetscCall(VecAXPY(Q, -1.0, Qref));
135   PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
136   PetscCall(VecScale(Q, 1. / Qrefnorm));
137   PetscCall(VecNorm(Q, NORM_MAX, &error));
138 
139   // Check error
140   if (error > app_ctx->test_tol) {
141     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
142   }
143 
144   // Cleanup
145   PetscCall(PetscViewerDestroy(&viewer));
146   PetscCall(VecDestroy(&Qref));
147 
148   PetscFunctionReturn(0);
149 }
150 
151 // Get error for problems with exact solutions
152 PetscErrorCode GetError_NS(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
153   PetscInt  loc_nodes;
154   Vec       Q_exact, Q_exact_loc;
155   PetscReal rel_error, norm_error, norm_exact;
156   PetscFunctionBegin;
157 
158   // Get exact solution at final time
159   PetscCall(DMCreateGlobalVector(dm, &Q_exact));
160   PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
161   PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
162   PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));
163 
164   // Get |exact solution - obtained solution|
165   PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
166   PetscCall(VecAXPY(Q, -1.0, Q_exact));
167   PetscCall(VecNorm(Q, NORM_1, &norm_error));
168 
169   // Compute relative error
170   rel_error = norm_error / norm_exact;
171 
172   // Output relative error
173   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
174   // Cleanup
175   PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
176   PetscCall(VecDestroy(&Q_exact));
177 
178   PetscFunctionReturn(0);
179 }
180 
181 // Post-processing
182 PetscErrorCode PostProcess_NS(TS ts, CeedData ceed_data, DM dm, ProblemData *problem, User user, Vec Q, PetscScalar final_time) {
183   PetscInt          steps;
184   TSConvergedReason reason;
185   PetscFunctionBegin;
186 
187   // Print relative error
188   if (problem->non_zero_time && user->app_ctx->test_type == TESTTYPE_NONE) {
189     PetscCall(GetError_NS(ceed_data, dm, user, Q, final_time));
190   }
191 
192   // Print final time and number of steps
193   PetscCall(TSGetStepNumber(ts, &steps));
194   PetscCall(TSGetConvergedReason(ts, &reason));
195   if (user->app_ctx->test_type == TESTTYPE_NONE) {
196     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
197                           steps, (double)final_time));
198   }
199 
200   // Output numerical values from command line
201   PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));
202 
203   // Compare reference solution values with current test run for CI
204   if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
205     PetscCall(RegressionTests_NS(user->app_ctx, Q));
206   }
207 
208   PetscFunctionReturn(0);
209 }
210 
211 const PetscInt FLUIDS_FILE_TOKEN = 0xceedf00;
212 
213 // Gather initial Q values in case of continuation of simulation
214 PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
215   PetscViewer viewer;
216 
217   PetscFunctionBegin;
218 
219   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
220   PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
221   PetscCall(PetscViewerDestroy(&viewer));
222 
223   PetscFunctionReturn(0);
224 }
225 
226 // Record boundary values from initial condition
227 PetscErrorCode SetBCsFromICs_NS(DM dm, Vec Q, Vec Q_loc) {
228   Vec Qbc, boundary_mask;
229   PetscFunctionBegin;
230 
231   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
232   PetscCall(VecCopy(Q_loc, Qbc));
233   PetscCall(VecZeroEntries(Q_loc));
234   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
235   PetscCall(VecAXPY(Qbc, -1., Q_loc));
236   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
237   PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_NS));
238 
239   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
240   PetscCall(DMGetGlobalVector(dm, &Q));
241   PetscCall(VecZeroEntries(boundary_mask));
242   PetscCall(VecSet(Q, 1.0));
243   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, boundary_mask));
244   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
245 
246   PetscFunctionReturn(0);
247 }
248 
249 // Free a plain data context that was allocated using PETSc; returning libCEED error codes
250 int FreeContextPetsc(void *data) {
251   if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
252   return CEED_ERROR_SUCCESS;
253 }
254 
255 // Return mass qfunction specification for number of components N
256 PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
257   PetscFunctionBeginUser;
258 
259   switch (N) {
260     case 1:
261       CeedQFunctionCreateInterior(ceed, 1, Mass_1, Mass_1_loc, qf);
262       break;
263     case 5:
264       CeedQFunctionCreateInterior(ceed, 1, Mass_5, Mass_5_loc, qf);
265       break;
266     case 7:
267       CeedQFunctionCreateInterior(ceed, 1, Mass_7, Mass_7_loc, qf);
268       break;
269     case 9:
270       CeedQFunctionCreateInterior(ceed, 1, Mass_9, Mass_9_loc, qf);
271       break;
272     case 22:
273       CeedQFunctionCreateInterior(ceed, 1, Mass_22, Mass_22_loc, qf);
274       break;
275     default:
276       SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Could not find mass qfunction of size %d", N);
277   }
278 
279   CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP);
280   CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE);
281   CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP);
282   PetscFunctionReturn(0);
283 }
284 
/* @brief L^2 Projection of a source FEM function to a target FEM space
 *
 * To solve system using a lumped mass matrix, pass a KSP object with ksp_type=preonly, pc_type=jacobi, pc_jacobi_type=rowsum.
 *
 * @param[in]  source_vec    Global Vec of the source FEM function. NULL indicates using rhs_matop_ctx->X_loc
 * @param[out] target_vec    Global Vec of the target (result) FEM function. NULL indicates using rhs_matop_ctx->Y_loc
 * @param[in]  rhs_matop_ctx MatopApplyContext for performing the RHS evaluation
 * @param[in]  ksp           KSP for solving the consistent projection problem
 */
PetscErrorCode ComputeL2Projection(Vec source_vec, Vec target_vec, OperatorApplyContext rhs_matop_ctx, KSP ksp) {
  PetscFunctionBeginUser;

  // Evaluate the projection RHS into target_vec via the CEED operator
  PetscCall(ApplyCeedOperatorGlobalToGlobal(source_vec, target_vec, rhs_matop_ctx));
  // Solve the projection system in place: target_vec serves as both RHS and solution
  PetscCall(KSPSolve(ksp, target_vec, target_vec));

  PetscFunctionReturn(0);
}
302 
303 PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
304   PetscFunctionBeginUser;
305   if (context == NULL) PetscFunctionReturn(0);
306 
307   PetscCall(DMDestroy(&context->dm));
308   PetscCall(KSPDestroy(&context->ksp));
309 
310   PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));
311 
312   PetscCall(PetscFree(context));
313 
314   PetscFunctionReturn(0);
315 }
316 
317 /*
318  * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
319  *
320  * This function opens the file specified by `path` using `PetscFOpen` and passes the file pointer in `fp`.
321  * It is not closed in this function, thus `fp` must be closed sometime after this function has been called (using `PetscFClose` for example).
322  *
323  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
324  *
325  * @param[in]  comm           MPI_Comm for the program
326  * @param[in]  path           Path to the file
327  * @param[in]  char_array_len Length of the character array that should contain each line
328  * @param[out] dims           Dimensions of the file, taken from the first line of the file
329  * @param[out] fp File        pointer to the opened file
330  */
331 PetscErrorCode PHASTADatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
332                                  FILE **fp) {
333   PetscInt ndims;
334   char     line[char_array_len];
335   char   **array;
336 
337   PetscFunctionBeginUser;
338   PetscCall(PetscFOpen(comm, path, "r", fp));
339   PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
340   PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
341   PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %" PetscInt_FMT " dimensions instead of 2 on the first line of %s", ndims, path);
342 
343   for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
344   PetscCall(PetscStrToArrayDestroy(ndims, array));
345 
346   PetscFunctionReturn(0);
347 }
348 
349 /*
350  * @brief Get the number of rows for the PHASTA file at path.
351  *
352  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
353  *
354  * @param[in]  comm  MPI_Comm for the program
355  * @param[in]  path  Path to the file
356  * @param[out] nrows Number of rows
357  */
358 PetscErrorCode PHASTADatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
359   const PetscInt char_array_len = 512;
360   PetscInt       dims[2];
361   FILE          *fp;
362 
363   PetscFunctionBeginUser;
364   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
365   *nrows = dims[0];
366   PetscCall(PetscFClose(comm, fp));
367 
368   PetscFunctionReturn(0);
369 }
370 
371 PetscErrorCode PHASTADatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
372   PetscInt       ndims, dims[2];
373   FILE          *fp;
374   const PetscInt char_array_len = 512;
375   char           line[char_array_len];
376   char         **row_array;
377   PetscFunctionBeginUser;
378 
379   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
380 
381   for (PetscInt i = 0; i < dims[0]; i++) {
382     PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
383     PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
384     PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
385                "Line %" PetscInt_FMT " of %s does not contain enough columns (%" PetscInt_FMT " instead of %" PetscInt_FMT ")", i, path, ndims,
386                dims[1]);
387 
388     for (PetscInt j = 0; j < dims[1]; j++) {
389       array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
390     }
391   }
392 
393   PetscCall(PetscFClose(comm, fp));
394 
395   PetscFunctionReturn(0);
396 }
397