xref: /libCEED/examples/fluids/src/misc.c (revision 8535f0aae359c08d55230a1e48a051b83ac1b33c)
// Copyright (c) 2017-2024, Lawrence Livermore National Security, LLC and other CEED contributors.
// All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
//
// SPDX-License-Identifier: BSD-2-Clause
//
// This file is part of CEED:  http://github.com/ceed

/// @file
/// Miscellaneous utility functions

#include <ceed.h>
#include <petscdm.h>
#include <petscsf.h>
#include <petscts.h>

#include "../navierstokes.h"
#include "../qfunctions/mass.h"

PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
  Ceed         ceed = user->ceed;
  CeedVector   mult_vec;
  PetscMemType m_mem_type;
  Vec          Multiplicity, Multiplicity_loc;

  PetscFunctionBeginUser;
  if (user->phys->ics_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time));
  PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));

  PetscCallCeed(ceed, CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL));

  // -- Get multiplicity
  PetscCall(DMGetLocalVector(dm, &Multiplicity_loc));
  PetscCall(VecPetscToCeed(Multiplicity_loc, &m_mem_type, mult_vec));
  PetscCallCeed(ceed, CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec));
  PetscCall(VecCeedToPetsc(mult_vec, m_mem_type, Multiplicity_loc));

  PetscCall(DMGetGlobalVector(dm, &Multiplicity));
  PetscCall(VecZeroEntries(Multiplicity));
  PetscCall(DMLocalToGlobal(dm, Multiplicity_loc, ADD_VALUES, Multiplicity));

  // -- Fix multiplicity
  PetscCall(VecPointwiseDivide(Q, Q, Multiplicity));
  PetscCall(VecPointwiseDivide(Q_loc, Q_loc, Multiplicity_loc));

  PetscCall(DMRestoreLocalVector(dm, &Multiplicity_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Multiplicity));
  PetscCallCeed(ceed, CeedVectorDestroy(&mult_vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
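// Note on ICs_FixMultiplicity (above): ApplyCeedOperatorLocalToGlobal evaluates the ICs element by element and the
// local-to-global scatter accumulates the element contributions, so a node shared by m elements receives m identical
// copies of its IC value.  Dividing by the element restriction multiplicity recovers the pointwise value,
// i.e. Q_i = (summed contributions at node i) / mult_i.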

// Record boundary values from initial condition
PetscErrorCode SetBCsFromICs(DM dm, Vec Q, Vec Q_loc) {
  PetscFunctionBeginUser;
  {  // Capture initial condition values in Qbc
    Vec Qbc;

    PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
    PetscCall(VecCopy(Q_loc, Qbc));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
    PetscCall(VecAXPY(Qbc, -1., Q_loc));
    PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_FromICs));

  {  // Set boundary mask to zero out essential BCs
    Vec boundary_mask, ones;

    PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMGetGlobalVector(dm, &ones));
    PetscCall(VecZeroEntries(boundary_mask));
    PetscCall(VecSet(ones, 1.0));
    PetscCall(DMGlobalToLocal(dm, ones, INSERT_VALUES, boundary_mask));
    PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMRestoreGlobalVector(dm, &ones));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
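// After SetBCsFromICs (above), the named local vector "Qbc" holds the IC values on the essential-BC dofs only:
// copying Q_loc captures the full IC, and subtracting the values scattered back from the global vector (which carries
// no constrained dofs) zeroes the unconstrained entries.  The "boundary mask" is the complementary indicator:
// 1 on unconstrained dofs and 0 on essential-BC dofs.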

PetscErrorCode DMPlexInsertBoundaryValues_FromICs(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
                                                  Vec grad_FVM) {
  Vec Qbc, boundary_mask;

  PetscFunctionBeginUser;
  // Mask (zero) Strong BC entries
  PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
  PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));

  PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
  PetscCall(VecAXPY(Q_loc, 1., Qbc));
  PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
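// In effect, DMPlexInsertBoundaryValues_FromICs (above) reconstructs Q_loc = boundary_mask .* Q_loc + Qbc: entries on
// unconstrained dofs are kept, while essential-BC entries are overwritten with the values recorded from the initial
// condition by SetBCsFromICs.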

static PetscErrorCode BinaryReadIntoInt(PetscViewer viewer, PetscInt *out, PetscDataType file_type) {
  PetscFunctionBeginUser;
  if (file_type == PETSC_INT32) {
    PetscInt32 val;
    PetscCall(PetscViewerBinaryRead(viewer, &val, 1, NULL, PETSC_INT32));
    *out = val;
  } else if (file_type == PETSC_INT64) {
    PetscInt64 val;
    PetscCall(PetscViewerBinaryRead(viewer, &val, 1, NULL, PETSC_INT64));
    *out = val;
  } else {
    PetscCall(PetscViewerBinaryRead(viewer, out, 1, NULL, PETSC_INT));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// @brief Load vector from binary file, possibly with embedded solution time and step number
PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
  PetscInt      file_step_number;
  PetscInt32    token;
  PetscReal     file_time;
  PetscDataType file_type = PETSC_INT32;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT32));
  if (token == FLUIDS_FILE_TOKEN_32 || token == FLUIDS_FILE_TOKEN_64 ||
      token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
    if (token == FLUIDS_FILE_TOKEN_32) file_type = PETSC_INT32;
    else if (token == FLUIDS_FILE_TOKEN_64) file_type = PETSC_INT64;
    PetscCall(BinaryReadIntoInt(viewer, &file_step_number, file_type));
    PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
    if (time) *time = file_time;
    if (step_number) *step_number = file_step_number;
  } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, ]
    PetscInt length, N;
    PetscCall(BinaryReadIntoInt(viewer, &length, file_type));
    PetscCall(VecGetSize(Q, &N));
    PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
    PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
  } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");

  PetscCall(VecLoad(Q, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
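// On-disk layout implied by the reader above (new-style checkpoints):
//   [ file token (int32) | step number (int32 or int64, per token) | time (PetscReal) | PETSc Vec (header + data) ]
// Legacy files contain only the serialized Vec.  A minimal writer sketch under that assumption (variable names here
// are illustrative and not part of this file):
//   PetscCall(PetscViewerBinaryWrite(viewer, &FLUIDS_FILE_TOKEN_32, 1, PETSC_INT32));
//   PetscCall(PetscViewerBinaryWrite(viewer, &step_number_32, 1, PETSC_INT32));  // PetscInt32 step counter
//   PetscCall(PetscViewerBinaryWrite(viewer, &time, 1, PETSC_REAL));             // PetscReal solution time
//   PetscCall(VecView(Q, viewer));                                               // Vec header + entries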

// Compare reference solution values with current test run for CI
PetscErrorCode RegressionTest(AppCtx app_ctx, Vec Q) {
  Vec         Q_ref;
  PetscViewer viewer;
  PetscReal   error, norm_Q, norm_Q_ref;
  MPI_Comm    comm = PetscObjectComm((PetscObject)Q);

  PetscFunctionBeginUser;
  // Read reference file
  PetscCall(VecDuplicate(Q, &Q_ref));
  PetscCheck(strcmp(app_ctx->test_file_path, "") != 0, comm, PETSC_ERR_FILE_READ, "File for regression test not given");
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
  PetscCall(LoadFluidsBinaryVec(comm, viewer, Q_ref, NULL, NULL));

  // Compute error with respect to reference solution
  PetscCall(VecNorm(Q, NORM_MAX, &norm_Q));
  PetscCall(VecNorm(Q_ref, NORM_MAX, &norm_Q_ref));
  PetscCall(VecAXPY(Q, -1.0, Q_ref));
  PetscCall(VecScale(Q, 1. / norm_Q_ref));
  PetscCall(VecNorm(Q, NORM_MAX, &error));

  // Check error
  if (error > app_ctx->test_tol) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\nReference solution max norm: %g, computed solution max norm: %g\n",
                          (double)error, (double)norm_Q_ref, (double)norm_Q));
  }

  // Cleanup
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&Q_ref));
  PetscFunctionReturn(PETSC_SUCCESS);
}
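// The regression metric above is the max-norm relative error ||Q - Q_ref||_inf / ||Q_ref||_inf, compared against
// app_ctx->test_tol; norm_Q (the max norm of the computed solution) is reported only for diagnostics.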

// Get error for problems with exact solutions
PetscErrorCode PrintError(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
  PetscInt  loc_nodes;
  Vec       Q_exact, Q_exact_loc;
  PetscReal rel_error, norm_error, norm_exact;

  PetscFunctionBeginUser;
  // Get exact solution at final time
  PetscCall(DMGetGlobalVector(dm, &Q_exact));
  PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
  PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
  PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));

  // Get |exact solution - obtained solution|
  PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
  PetscCall(VecAXPY(Q, -1.0, Q_exact));
  PetscCall(VecNorm(Q, NORM_1, &norm_error));

  rel_error = norm_error / norm_exact;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
  PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Q_exact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Post-processing
PetscErrorCode PostProcess(TS ts, CeedData ceed_data, DM dm, ProblemData problem, User user, Vec Q, PetscScalar final_time) {
  PetscInt          steps;
  TSConvergedReason reason;

  PetscFunctionBeginUser;
  // Print relative error
  if (problem->compute_exact_solution_error && user->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PrintError(ceed_data, dm, user, Q, final_time));
  }

  // Print final time and number of steps
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  if (user->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
                          steps, (double)final_time));
  }

  // Output numerical values from command line
  PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));

  // Compare reference solution values with current test run for CI
  if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
    PetscCall(RegressionTest(user->app_ctx, Q));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

const PetscInt32 FLUIDS_FILE_TOKEN    = 0xceedf00;  // for backwards compatibility
const PetscInt32 FLUIDS_FILE_TOKEN_32 = 0xceedf32;
const PetscInt32 FLUIDS_FILE_TOKEN_64 = 0xceedf64;

// Gather initial Q values in case of continuation of simulation
PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
  PetscViewer viewer;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
  PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Free a plain data context that was allocated using PETSc; returning libCEED error codes
int FreeContextPetsc(void *data) {
  if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
  return CEED_ERROR_SUCCESS;
}
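// FreeContextPetsc matches libCEED's user context-data destroy callback signature, so one typical (illustrative) use
// is to attach it to a QFunction context whose data was allocated with PetscMalloc/PetscCalloc.  A sketch, assuming a
// CeedQFunctionContext `ctx` and PETSc-allocated `data` (neither is defined in this file):
//   CeedQFunctionContextCreate(ceed, &ctx);
//   CeedQFunctionContextSetData(ctx, CEED_MEM_HOST, CEED_USE_POINTER, sizeof(*data), data);
//   CeedQFunctionContextSetDataDestroy(ctx, CEED_MEM_HOST, FreeContextPetsc);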

// Return mass qfunction specification for number of components N
PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
  PetscFunctionBeginUser;
  switch (N) {
    case 1:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_1, Mass_1_loc, qf));
      break;
    case 5:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_5, Mass_5_loc, qf));
      break;
    case 7:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_7, Mass_7_loc, qf));
      break;
    case 9:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_9, Mass_9_loc, qf));
      break;
    case 22:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_22, Mass_22_loc, qf));
      break;
    default:
      SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Could not find mass qfunction of size %" CeedInt_FMT, N);
  }

  PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP));
  PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE));
  PetscCallCeed(ceed, CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP));
  PetscCallCeed(ceed, CeedQFunctionSetUserFlopsEstimate(*qf, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}
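// A typical way to consume the QFunction built above is to wrap it in a CeedOperator using the same field names.
// Sketch only; `elem_restr_q`, `elem_restr_qd_i`, `basis_q`, and `q_data` are assumed to exist as in the rest of the
// solver setup and are not defined in this file:
//   CeedQFunction qf_mass;
//   CeedOperator  op_mass;
//   PetscCall(CreateMassQFunction(ceed, 5, q_data_size, &qf_mass));
//   CeedOperatorCreate(ceed, qf_mass, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, &op_mass);
//   CeedOperatorSetField(op_mass, "u", elem_restr_q, basis_q, CEED_VECTOR_ACTIVE);
//   CeedOperatorSetField(op_mass, "qdata", elem_restr_qd_i, CEED_BASIS_NONE, q_data);
//   CeedOperatorSetField(op_mass, "v", elem_restr_q, basis_q, CEED_VECTOR_ACTIVE);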

PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
  PetscFunctionBeginUser;
  if (context == NULL) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(DMDestroy(&context->dm));
  PetscCall(KSPDestroy(&context->ksp));

  PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));

  PetscCall(PetscFree(context));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
 * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
 *
 * This function opens the file specified by `path` using `PetscFOpen` and returns the file pointer in `fp`.
 * The file is not closed here, so `fp` must be closed sometime after this function has been called (using `PetscFClose`, for example).
 *
 * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
 *
 * @param[in]  comm           MPI_Comm for the program
 * @param[in]  path           Path to the file
 * @param[in]  char_array_len Length of the character array that should contain each line
 * @param[out] dims           Dimensions of the file, taken from the first line of the file
 * @param[out] fp             File pointer to the opened file
 */
PetscErrorCode PhastaDatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
                                 FILE **fp) {
  int    ndims;
  char   line[char_array_len];
  char **array;

  PetscFunctionBeginUser;
  PetscCall(PetscFOpen(comm, path, "r", fp));
  PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
  PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
  PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %d dimensions instead of 2 on the first line of %s", ndims, path);

  for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
  PetscCall(PetscStrToArrayDestroy(ndims, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
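// Expected *.dat layout (illustrative example, not a file shipped with this source): the first line gives the row and
// column counts separated by a single space, and each subsequent line holds one row of whitespace-separated values:
//   3 2
//   0.0 1.0
//   0.5 0.9
//   1.0 0.7
// PhastaDatFileGetNRows() below reads only that first line, while PhastaDatFileReadToArrayReal() reads the full table.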

/**
 * @brief Get the number of rows for the PHASTA file at path.
 *
 * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
 *
 * @param[in]  comm  MPI_Comm for the program
 * @param[in]  path  Path to the file
 * @param[out] nrows Number of rows
 */
PetscErrorCode PhastaDatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
  const PetscInt char_array_len = 512;
  PetscInt       dims[2];
  FILE          *fp;

  PetscFunctionBeginUser;
  PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));
  *nrows = dims[0];
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PhastaDatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
  PetscInt       dims[2];
  FILE          *fp;
  const PetscInt char_array_len = 512;
  char           line[char_array_len];

  PetscFunctionBeginUser;
  PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));

  for (PetscInt i = 0; i < dims[0]; i++) {
    int    ndims;
    char **row_array;

    PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
    PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
    PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
               "Line %" PetscInt_FMT " of %s does not contain enough columns (%d instead of %" PetscInt_FMT ")", i, path, ndims, dims[1]);

    for (PetscInt j = 0; j < dims[1]; j++) array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
    PetscCall(PetscStrToArrayDestroy(ndims, row_array));
  }

  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Print information about the given simulation run
PetscErrorCode PrintRunInfo(User user, Physics phys_ctx, ProblemData problem, TS ts) {
  Ceed     ceed = user->ceed;
  MPI_Comm comm = PetscObjectComm((PetscObject)ts);

  PetscFunctionBeginUser;
  // Header and rank
  char        host_name[PETSC_MAX_PATH_LEN];
  PetscMPIInt rank, comm_size;
  PetscCall(PetscGetHostName(host_name, sizeof host_name));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &comm_size));
  PetscCall(PetscPrintf(comm,
                        "\n-- Navier-Stokes solver - libCEED + PETSc --\n"
                        "  MPI:\n"
                        "    Host Name                          : %s\n"
                        "    Total ranks                        : %d\n",
                        host_name, comm_size));

  // Problem specific info
  PetscCall(problem->print_info(user, problem, user->app_ctx));

  // libCEED
  const char *used_resource;
  CeedMemType mem_type_backend;
  PetscCallCeed(ceed, CeedGetResource(user->ceed, &used_resource));
  PetscCallCeed(ceed, CeedGetPreferredMemType(user->ceed, &mem_type_backend));
  PetscCall(PetscPrintf(comm,
                        "  libCEED:\n"
                        "    libCEED Backend                    : %s\n"
                        "    libCEED Backend MemType            : %s\n",
                        used_resource, CeedMemTypes[mem_type_backend]));
  // PETSc
  VecType vec_type;
  char    box_faces_str[PETSC_MAX_PATH_LEN] = "3,3,3";
  if (problem->dim == 2) box_faces_str[3] = '\0';
  PetscCall(PetscOptionsGetString(NULL, NULL, "-dm_plex_box_faces", box_faces_str, sizeof(box_faces_str), NULL));
  PetscCall(DMGetVecType(user->dm, &vec_type));
  PetscCall(PetscPrintf(comm,
                        "  PETSc:\n"
                        "    Box Faces                          : %s\n"
                        "    DM VecType                         : %s\n"
                        "    Time Stepping Scheme               : %s\n",
                        box_faces_str, vec_type, phys_ctx->implicit ? "implicit" : "explicit"));
  {
    char           pmat_type_str[PETSC_MAX_PATH_LEN];
    MatType        amat_type, pmat_type;
    Mat            Amat, Pmat;
    TSIJacobianFn *ijacob_function;

    PetscCall(TSGetIJacobian(ts, &Amat, &Pmat, &ijacob_function, NULL));
    PetscCall(MatGetType(Amat, &amat_type));
    PetscCall(MatGetType(Pmat, &pmat_type));

    PetscCall(PetscStrncpy(pmat_type_str, pmat_type, sizeof(pmat_type_str)));
    if (!strcmp(pmat_type, MATCEED)) {
      MatType pmat_coo_type;
      char    pmat_coo_type_str[PETSC_MAX_PATH_LEN];

      PetscCall(MatCeedGetCOOMatType(Pmat, &pmat_coo_type));
      PetscCall(PetscSNPrintf(pmat_coo_type_str, sizeof(pmat_coo_type_str), " (COO MatType: %s)", pmat_coo_type));
      PetscCall(PetscStrlcat(pmat_type_str, pmat_coo_type_str, sizeof(pmat_type_str)));
    }
    if (ijacob_function) {
      PetscCall(PetscPrintf(comm,
                            "    IJacobian A MatType                : %s\n"
                            "    IJacobian P MatType                : %s\n",
                            amat_type, pmat_type_str));
    }
  }
  if (user->app_ctx->cont_steps) {
    PetscCall(PetscPrintf(comm,
                          "  Continue:\n"
                          "    Filename                           : %s\n"
                          "    Step                               : %" PetscInt_FMT "\n"
                          "    Time                               : %g\n",
                          user->app_ctx->cont_file, user->app_ctx->cont_steps, user->app_ctx->cont_time));
  }
  // Mesh
  const PetscInt num_comp_q = 5;
  PetscInt       glob_dofs, owned_dofs, local_dofs;
  const CeedInt  num_P = user->app_ctx->degree + 1, num_Q = num_P + user->app_ctx->q_extra;
  PetscCall(DMGetGlobalVectorInfo(user->dm, &owned_dofs, &glob_dofs, NULL));
  PetscCall(DMGetLocalVectorInfo(user->dm, &local_dofs, NULL, NULL));
  PetscCall(PetscPrintf(comm,
                        "  Mesh:\n"
                        "    Number of 1D Basis Nodes (P)       : %" CeedInt_FMT "\n"
                        "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
                        "    Global DoFs                        : %" PetscInt_FMT "\n"
                        "    DoFs per node                      : %" PetscInt_FMT "\n"
                        "    Global %" PetscInt_FMT "-DoF nodes                 : %" PetscInt_FMT "\n",
                        num_P, num_Q, glob_dofs, num_comp_q, num_comp_q, glob_dofs / num_comp_q));
  // -- Get Partition Statistics
  PetscCall(PetscPrintf(comm, "  Partition:                             (min,max,median,max/median)\n"));
  {
    PetscInt *gather_buffer = NULL;
    PetscInt  part_owned_dofs[3], part_local_dofs[3], part_boundary_dofs[3], part_neighbors[3];
    PetscInt  median_index = comm_size % 2 ? comm_size / 2 : comm_size / 2 - 1;
    if (!rank) PetscCall(PetscMalloc1(comm_size, &gather_buffer));

    PetscCallMPI(MPI_Gather(&owned_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_owned_dofs[0]             = gather_buffer[0];              // min
      part_owned_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_owned_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_owned_dof_ratio = (PetscReal)part_owned_dofs[1] / (PetscReal)part_owned_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Global Vector %" PetscInt_FMT "-DoF nodes          : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_owned_dofs[0] / num_comp_q, part_owned_dofs[1] / num_comp_q, part_owned_dofs[2] / num_comp_q, part_owned_dof_ratio));
    }

    PetscCallMPI(MPI_Gather(&local_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_local_dofs[0]             = gather_buffer[0];              // min
      part_local_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_local_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_local_dof_ratio = (PetscReal)part_local_dofs[1] / (PetscReal)part_local_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Local Vector %" PetscInt_FMT "-DoF nodes           : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_local_dofs[0] / num_comp_q, part_local_dofs[1] / num_comp_q, part_local_dofs[2] / num_comp_q, part_local_dof_ratio));
    }

    if (comm_size != 1) {
      PetscInt num_remote_roots_total = 0, num_remote_leaves_total = 0, num_ghost_interface_ranks = 0, num_owned_interface_ranks = 0;
      {
        PetscSF            sf;
        PetscMPIInt        nrranks, niranks;
        const PetscInt    *roffset, *rmine, *rremote, *ioffset, *irootloc;
        const PetscMPIInt *rranks, *iranks;

        PetscCall(DMGetSectionSF(user->dm, &sf));
        PetscCall(PetscSFGetRootRanks(sf, &nrranks, &rranks, &roffset, &rmine, &rremote));
        PetscCall(PetscSFGetLeafRanks(sf, &niranks, &iranks, &ioffset, &irootloc));
        for (PetscInt i = 0; i < nrranks; i++) {
          if (rranks[i] == rank) continue;  // Ignore same-part global->local transfers
          num_remote_roots_total += roffset[i + 1] - roffset[i];
          num_ghost_interface_ranks++;
        }
        for (PetscInt i = 0; i < niranks; i++) {
          if (iranks[i] == rank) continue;
          num_remote_leaves_total += ioffset[i + 1] - ioffset[i];
          num_owned_interface_ranks++;
        }
      }
      PetscCallMPI(MPI_Gather(&num_remote_roots_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Ghost Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
            num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
            part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_ghost_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Ghost Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_remote_leaves_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Owned Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
            num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
            part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_owned_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Owned Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }
    }

    if (!rank) PetscCall(PetscFree(gather_buffer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}