xref: /libCEED/examples/fluids/src/misc.c (revision 1f97d2f18688a5caa5dd9f004da3151e92048b76)
1 // Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Miscellaneous utility functions
10 
11 #include <ceed.h>
12 #include <petscdm.h>
13 #include <petscts.h>
14 
15 #include "../navierstokes.h"
16 #include "../qfunctions/mass.h"
17 
18 PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
19   PetscFunctionBeginUser;
20 
21   // ---------------------------------------------------------------------------
22   // Update time for evaluation
23   // ---------------------------------------------------------------------------
24   if (user->phys->ics_time_label) CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time);
25 
26   // ---------------------------------------------------------------------------
27   // ICs
28   // ---------------------------------------------------------------------------
29   // -- CEED Restriction
30   CeedVector q0_ceed;
31   CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &q0_ceed, NULL);
32 
33   // -- Place PETSc vector in CEED vector
34   PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));
35 
36   // ---------------------------------------------------------------------------
37   // Fix multiplicity for output of ICs
38   // ---------------------------------------------------------------------------
39   // -- CEED Restriction
40   CeedVector mult_vec;
41   CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL);
42 
43   // -- Place PETSc vector in CEED vector
44   PetscMemType m_mem_type;
45   Vec          multiplicity_loc;
46   PetscCall(DMGetLocalVector(dm, &multiplicity_loc));
47   PetscCall(VecP2C(multiplicity_loc, &m_mem_type, mult_vec));
48 
49   // -- Get multiplicity
50   CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec);
51 
52   // -- Restore vectors
53   PetscCall(VecC2P(mult_vec, m_mem_type, multiplicity_loc));
54 
55   // -- Local-to-Global
56   Vec multiplicity;
57   PetscCall(DMGetGlobalVector(dm, &multiplicity));
58   PetscCall(VecZeroEntries(multiplicity));
59   PetscCall(DMLocalToGlobal(dm, multiplicity_loc, ADD_VALUES, multiplicity));
60 
61   // -- Fix multiplicity
62   PetscCall(VecPointwiseDivide(Q, Q, multiplicity));
63   PetscCall(VecPointwiseDivide(Q_loc, Q_loc, multiplicity_loc));
64 
65   // -- Restore vectors
66   PetscCall(DMRestoreLocalVector(dm, &multiplicity_loc));
67   PetscCall(DMRestoreGlobalVector(dm, &multiplicity));
68 
69   // Cleanup
70   CeedVectorDestroy(&mult_vec);
71   CeedVectorDestroy(&q0_ceed);
72 
73   PetscFunctionReturn(PETSC_SUCCESS);
74 }
75 
76 PetscErrorCode DMPlexInsertBoundaryValues_NS(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
77                                              Vec grad_FVM) {
78   Vec Qbc, boundary_mask;
79   PetscFunctionBegin;
80 
81   // Mask (zero) Strong BC entries
82   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
83   PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
84   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
85 
86   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
87   PetscCall(VecAXPY(Q_loc, 1., Qbc));
88   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
89 
90   PetscFunctionReturn(PETSC_SUCCESS);
91 }
92 
93 // @brief Load vector from binary file, possibly with embedded solution time and step number
94 PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
95   PetscInt   file_step_number;
96   PetscInt32 token;
97   PetscReal  file_time;
98   PetscFunctionBeginUser;
99 
100   // Attempt
101   PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT32));
102   if (token == FLUIDS_FILE_TOKEN_32 || token == FLUIDS_FILE_TOKEN_64 ||
103       token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
104     PetscCall(PetscViewerBinaryRead(viewer, &file_step_number, 1, NULL, PETSC_INT));
105     PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
106     if (time) *time = file_time;
107     if (step_number) *step_number = file_step_number;
108   } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, ]
109     PetscInt length, N;
110     PetscCall(PetscViewerBinaryRead(viewer, &length, 1, NULL, PETSC_INT));
111     PetscCall(VecGetSize(Q, &N));
112     PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
113     PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
114   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");
115 
116   // Load Q from existent solution
117   PetscCall(VecLoad(Q, viewer));
118 
119   PetscFunctionReturn(PETSC_SUCCESS);
120 }
121 
122 // Compare reference solution values with current test run for CI
123 PetscErrorCode RegressionTests_NS(AppCtx app_ctx, Vec Q) {
124   Vec         Qref;
125   PetscViewer viewer;
126   PetscReal   error, Qrefnorm;
127   MPI_Comm    comm = PetscObjectComm((PetscObject)Q);
128   PetscFunctionBegin;
129 
130   // Read reference file
131   PetscCall(VecDuplicate(Q, &Qref));
132   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
133   PetscCall(LoadFluidsBinaryVec(comm, viewer, Qref, NULL, NULL));
134 
135   // Compute error with respect to reference solution
136   PetscCall(VecAXPY(Q, -1.0, Qref));
137   PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
138   PetscCall(VecScale(Q, 1. / Qrefnorm));
139   PetscCall(VecNorm(Q, NORM_MAX, &error));
140 
141   // Check error
142   if (error > app_ctx->test_tol) {
143     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
144   }
145 
146   // Cleanup
147   PetscCall(PetscViewerDestroy(&viewer));
148   PetscCall(VecDestroy(&Qref));
149 
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 // Get error for problems with exact solutions
154 PetscErrorCode GetError_NS(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
155   PetscInt  loc_nodes;
156   Vec       Q_exact, Q_exact_loc;
157   PetscReal rel_error, norm_error, norm_exact;
158   PetscFunctionBegin;
159 
160   // Get exact solution at final time
161   PetscCall(DMCreateGlobalVector(dm, &Q_exact));
162   PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
163   PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
164   PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));
165 
166   // Get |exact solution - obtained solution|
167   PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
168   PetscCall(VecAXPY(Q, -1.0, Q_exact));
169   PetscCall(VecNorm(Q, NORM_1, &norm_error));
170 
171   // Compute relative error
172   rel_error = norm_error / norm_exact;
173 
174   // Output relative error
175   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
176   // Cleanup
177   PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
178   PetscCall(VecDestroy(&Q_exact));
179 
180   PetscFunctionReturn(PETSC_SUCCESS);
181 }
182 
183 // Post-processing
184 PetscErrorCode PostProcess_NS(TS ts, CeedData ceed_data, DM dm, ProblemData *problem, User user, Vec Q, PetscScalar final_time) {
185   PetscInt          steps;
186   TSConvergedReason reason;
187   PetscFunctionBegin;
188 
189   // Print relative error
190   if (problem->non_zero_time && user->app_ctx->test_type == TESTTYPE_NONE) {
191     PetscCall(GetError_NS(ceed_data, dm, user, Q, final_time));
192   }
193 
194   // Print final time and number of steps
195   PetscCall(TSGetStepNumber(ts, &steps));
196   PetscCall(TSGetConvergedReason(ts, &reason));
197   if (user->app_ctx->test_type == TESTTYPE_NONE) {
198     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
199                           steps, (double)final_time));
200   }
201 
202   // Output numerical values from command line
203   PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));
204 
205   // Compare reference solution values with current test run for CI
206   if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
207     PetscCall(RegressionTests_NS(user->app_ctx, Q));
208   }
209   PetscFunctionReturn(PETSC_SUCCESS);
210 }
211 
// Magic tokens marking the header of a fluids binary restart file (see LoadFluidsBinaryVec)
const PetscInt32 FLUIDS_FILE_TOKEN    = 0xceedf00;  // for backwards compatibility
const PetscInt32 FLUIDS_FILE_TOKEN_32 = 0xceedf32;  // presumably the 32-bit integer variant — confirm against writer
const PetscInt32 FLUIDS_FILE_TOKEN_64 = 0xceedf64;  // presumably the 64-bit integer variant — confirm against writer
215 
216 // Gather initial Q values in case of continuation of simulation
217 PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
218   PetscViewer viewer;
219 
220   PetscFunctionBegin;
221 
222   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
223   PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
224   PetscCall(PetscViewerDestroy(&viewer));
225 
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 
229 // Record boundary values from initial condition
230 PetscErrorCode SetBCsFromICs_NS(DM dm, Vec Q, Vec Q_loc) {
231   Vec Qbc, boundary_mask;
232   PetscFunctionBegin;
233 
234   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
235   PetscCall(VecCopy(Q_loc, Qbc));
236   PetscCall(VecZeroEntries(Q_loc));
237   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
238   PetscCall(VecAXPY(Qbc, -1., Q_loc));
239   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
240   PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_NS));
241 
242   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
243   PetscCall(DMGetGlobalVector(dm, &Q));
244   PetscCall(VecZeroEntries(boundary_mask));
245   PetscCall(VecSet(Q, 1.0));
246   PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, boundary_mask));
247   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
248 
249   PetscFunctionReturn(PETSC_SUCCESS);
250 }
251 
252 // Free a plain data context that was allocated using PETSc; returning libCEED error codes
253 int FreeContextPetsc(void *data) {
254   if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
255   return CEED_ERROR_SUCCESS;
256 }
257 
258 // Return mass qfunction specification for number of components N
259 PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
260   PetscFunctionBeginUser;
261 
262   switch (N) {
263     case 1:
264       CeedQFunctionCreateInterior(ceed, 1, Mass_1, Mass_1_loc, qf);
265       break;
266     case 5:
267       CeedQFunctionCreateInterior(ceed, 1, Mass_5, Mass_5_loc, qf);
268       break;
269     case 7:
270       CeedQFunctionCreateInterior(ceed, 1, Mass_7, Mass_7_loc, qf);
271       break;
272     case 9:
273       CeedQFunctionCreateInterior(ceed, 1, Mass_9, Mass_9_loc, qf);
274       break;
275     case 22:
276       CeedQFunctionCreateInterior(ceed, 1, Mass_22, Mass_22_loc, qf);
277       break;
278     default:
279       SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Could not find mass qfunction of size %d", N);
280   }
281 
282   CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP);
283   CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE);
284   CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP);
285   PetscFunctionReturn(PETSC_SUCCESS);
286 }
287 
/* @brief L^2 Projection of a source FEM function to a target FEM space
 *
 * To solve system using a lumped mass matrix, pass a KSP object with ksp_type=preonly, pc_type=jacobi, pc_jacobi_type=rowsum.
 *
 * @param[in]  source_vec    Global Vec of the source FEM function. NULL indicates using rhs_matop_ctx->X_loc
 * @param[out] target_vec    Global Vec of the target (result) FEM function. NULL indicates using rhs_matop_ctx->Y_loc
 * @param[in]  rhs_matop_ctx OperatorApplyContext for performing the RHS evaluation
 * @param[in]  ksp           KSP for solving the consistent projection problem
 */
PetscErrorCode ComputeL2Projection(Vec source_vec, Vec target_vec, OperatorApplyContext rhs_matop_ctx, KSP ksp) {
  PetscFunctionBeginUser;

  // Form the projection RHS into target_vec, then solve the mass system in place
  PetscCall(ApplyCeedOperatorGlobalToGlobal(source_vec, target_vec, rhs_matop_ctx));
  PetscCall(KSPSolve(ksp, target_vec, target_vec));

  PetscFunctionReturn(PETSC_SUCCESS);
}
305 
306 PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
307   PetscFunctionBeginUser;
308   if (context == NULL) PetscFunctionReturn(PETSC_SUCCESS);
309 
310   PetscCall(DMDestroy(&context->dm));
311   PetscCall(KSPDestroy(&context->ksp));
312 
313   PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));
314 
315   PetscCall(PetscFree(context));
316 
317   PetscFunctionReturn(PETSC_SUCCESS);
318 }
319 
320 /*
321  * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
322  *
323  * This function opens the file specified by `path` using `PetscFOpen` and passes the file pointer in `fp`.
324  * It is not closed in this function, thus `fp` must be closed sometime after this function has been called (using `PetscFClose` for example).
325  *
326  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
327  *
328  * @param[in]  comm           MPI_Comm for the program
329  * @param[in]  path           Path to the file
330  * @param[in]  char_array_len Length of the character array that should contain each line
331  * @param[out] dims           Dimensions of the file, taken from the first line of the file
332  * @param[out] fp File        pointer to the opened file
333  */
334 PetscErrorCode PHASTADatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
335                                  FILE **fp) {
336   int    ndims;
337   char   line[char_array_len];
338   char **array;
339 
340   PetscFunctionBeginUser;
341   PetscCall(PetscFOpen(comm, path, "r", fp));
342   PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
343   PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
344   PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %d dimensions instead of 2 on the first line of %s", ndims, path);
345 
346   for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
347   PetscCall(PetscStrToArrayDestroy(ndims, array));
348 
349   PetscFunctionReturn(PETSC_SUCCESS);
350 }
351 
352 /*
353  * @brief Get the number of rows for the PHASTA file at path.
354  *
355  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
356  *
357  * @param[in]  comm  MPI_Comm for the program
358  * @param[in]  path  Path to the file
359  * @param[out] nrows Number of rows
360  */
361 PetscErrorCode PHASTADatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
362   const PetscInt char_array_len = 512;
363   PetscInt       dims[2];
364   FILE          *fp;
365 
366   PetscFunctionBeginUser;
367   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
368   *nrows = dims[0];
369   PetscCall(PetscFClose(comm, fp));
370 
371   PetscFunctionReturn(PETSC_SUCCESS);
372 }
373 
374 PetscErrorCode PHASTADatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
375   PetscInt       dims[2];
376   int            ndims;
377   FILE          *fp;
378   const PetscInt char_array_len = 512;
379   char           line[char_array_len];
380   char         **row_array;
381   PetscFunctionBeginUser;
382 
383   PetscCall(PHASTADatFileOpen(comm, path, char_array_len, dims, &fp));
384 
385   for (PetscInt i = 0; i < dims[0]; i++) {
386     PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
387     PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
388     PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
389                "Line %" PetscInt_FMT " of %s does not contain enough columns (%d instead of %" PetscInt_FMT ")", i, path, ndims, dims[1]);
390 
391     for (PetscInt j = 0; j < dims[1]; j++) {
392       array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
393     }
394   }
395 
396   PetscCall(PetscFClose(comm, fp));
397 
398   PetscFunctionReturn(PETSC_SUCCESS);
399 }
400 
// Log events for profiling libCEED operator application and assembly (registered in RegisterLogEvents)
PetscLogEvent       FLUIDS_CeedOperatorApply;
PetscLogEvent       FLUIDS_CeedOperatorAssemble;
PetscLogEvent       FLUIDS_CeedOperatorAssembleDiagonal;
PetscLogEvent       FLUIDS_CeedOperatorAssemblePointBlockDiagonal;
// Class id grouping the events above under "libCEED" in PETSc logging output
static PetscClassId libCEED_classid;
406 
407 PetscErrorCode RegisterLogEvents() {
408   PetscFunctionBeginUser;
409   PetscCall(PetscClassIdRegister("libCEED", &libCEED_classid));
410   PetscCall(PetscLogEventRegister("CeedOpApply", libCEED_classid, &FLUIDS_CeedOperatorApply));
411   PetscCall(PetscLogEventRegister("CeedOpAsm", libCEED_classid, &FLUIDS_CeedOperatorAssemble));
412   PetscCall(PetscLogEventRegister("CeedOpAsmD", libCEED_classid, &FLUIDS_CeedOperatorAssembleDiagonal));
413   PetscCall(PetscLogEventRegister("CeedOpAsmPBD", libCEED_classid, &FLUIDS_CeedOperatorAssemblePointBlockDiagonal));
414   PetscFunctionReturn(PETSC_SUCCESS);
415 }
416 
417 /**
418   @brief Translate array of CeedInt to PetscInt.
419     If the types differ, `array_ceed` is freed with `free()` and `array_petsc` is allocated with `malloc()`.
420     Caller is responsible for freeing `array_petsc` with `free()`.
421 
422   @param[in]      num_entries  Number of array entries
423   @param[in,out]  array_ceed   Array of CeedInts
424   @param[out]     array_petsc  Array of PetscInts
425 **/
426 PetscErrorCode IntArrayC2P(PetscInt num_entries, CeedInt **array_ceed, PetscInt **array_petsc) {
427   CeedInt  int_c = 0;
428   PetscInt int_p = 0;
429 
430   PetscFunctionBeginUser;
431   if (sizeof(int_c) == sizeof(int_p)) {
432     *array_petsc = (PetscInt *)*array_ceed;
433   } else {
434     *array_petsc = malloc(num_entries * sizeof(PetscInt));
435     for (PetscInt i = 0; i < num_entries; i++) (*array_petsc)[i] = (*array_ceed)[i];
436     free(*array_ceed);
437   }
438   *array_ceed = NULL;
439 
440   PetscFunctionReturn(PETSC_SUCCESS);
441 }
442 
443 /**
444   @brief Translate array of PetscInt to CeedInt.
445     If the types differ, `array_petsc` is freed with `PetscFree()` and `array_ceed` is allocated with `PetscMalloc1()`.
446     Caller is responsible for freeing `array_ceed` with `PetscFree()`.
447 
448   @param[in]      num_entries  Number of array entries
449   @param[in,out]  array_petsc  Array of PetscInts
450   @param[out]     array_ceed   Array of CeedInts
451 **/
452 PetscErrorCode IntArrayP2C(PetscInt num_entries, PetscInt **array_petsc, CeedInt **array_ceed) {
453   CeedInt  int_c = 0;
454   PetscInt int_p = 0;
455 
456   PetscFunctionBeginUser;
457   if (sizeof(int_c) == sizeof(int_p)) {
458     *array_ceed = (CeedInt *)*array_petsc;
459   } else {
460     PetscCall(PetscMalloc1(num_entries, array_ceed));
461     for (PetscInt i = 0; i < num_entries; i++) (*array_ceed)[i] = (*array_petsc)[i];
462     PetscCall(PetscFree(*array_petsc));
463   }
464   *array_petsc = NULL;
465 
466   PetscFunctionReturn(PETSC_SUCCESS);
467 }
468