xref: /libCEED/examples/fluids/src/misc.c (revision ed1ebff7abaa668c9b274382abc627b7a8754a20)
1 // Copyright (c) 2017-2025, Lawrence Livermore National Security, LLC and other CEED contributors.
2 // All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
3 //
4 // SPDX-License-Identifier: BSD-2-Clause
5 //
6 // This file is part of CEED:  http://github.com/ceed
7 
8 /// @file
9 /// Miscellaneous utility functions
10 
11 #include <ceed.h>
12 #include <petscdm.h>
13 #include <petscsf.h>
14 #include <petscts.h>
15 
16 #include "../navierstokes.h"
17 #include "../qfunctions/mass.h"
18 
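// @brief Compute initial conditions via the ICs CeedOperator and correct for node multiplicity
//
// The local-to-global assembly sums element contributions at shared nodes, so the result is divided pointwise by the
// node multiplicity (number of elements contributing to each node) to recover nodal values in both Q and Q_loc.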
19 PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
20   Ceed         ceed = user->ceed;
21   CeedVector   mult_vec;
22   PetscMemType m_mem_type;
23   Vec          Multiplicity, Multiplicity_loc;
24 
25   PetscFunctionBeginUser;
26   if (user->phys->ics_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time));
27   PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));
28 
29   PetscCallCeed(ceed, CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL));
30 
31   // -- Get multiplicity
32   PetscCall(DMGetLocalVector(dm, &Multiplicity_loc));
33   PetscCall(VecPetscToCeed(Multiplicity_loc, &m_mem_type, mult_vec));
34   PetscCallCeed(ceed, CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec));
35   PetscCall(VecCeedToPetsc(mult_vec, m_mem_type, Multiplicity_loc));
36 
37   PetscCall(DMGetGlobalVector(dm, &Multiplicity));
38   PetscCall(VecZeroEntries(Multiplicity));
39   PetscCall(DMLocalToGlobal(dm, Multiplicity_loc, ADD_VALUES, Multiplicity));
40 
41   // -- Fix multiplicity
42   PetscCall(VecPointwiseDivide(Q, Q, Multiplicity));
43   PetscCall(VecPointwiseDivide(Q_loc, Q_loc, Multiplicity_loc));
44 
45   PetscCall(DMRestoreLocalVector(dm, &Multiplicity_loc));
46   PetscCall(DMRestoreGlobalVector(dm, &Multiplicity));
47   PetscCallCeed(ceed, CeedVectorDestroy(&mult_vec));
48   PetscFunctionReturn(PETSC_SUCCESS);
49 }
50 
51 // Record boundary values from initial condition
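//
// Qbc ends up holding the difference between the local initial condition and the restriction of the global vector,
// i.e. the values on essential (strong) boundary nodes that are not present in the global vector.  The "boundary mask"
// local vector is set to 1 on entries that exist in the global vector and 0 on essential boundary entries, so that
// DMPlexInsertBoundaryValues_FromICs can zero the strong BC entries and then re-add the stored boundary values.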
52 PetscErrorCode SetBCsFromICs(DM dm, Vec Q, Vec Q_loc) {
53   PetscFunctionBeginUser;
54   {  // Capture initial condition values in Qbc
55     Vec Qbc;
56 
57     PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
58     PetscCall(VecCopy(Q_loc, Qbc));
59     PetscCall(VecZeroEntries(Q_loc));
60     PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
61     PetscCall(VecAXPY(Qbc, -1., Q_loc));
62     PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
63   }
64   PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_FromICs));
65 
66   {  // Set boundary mask to zero out essential BCs
67     Vec boundary_mask, ones;
68 
69     PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
70     PetscCall(DMGetGlobalVector(dm, &ones));
71     PetscCall(VecZeroEntries(boundary_mask));
72     PetscCall(VecSet(ones, 1.0));
73     PetscCall(DMGlobalToLocal(dm, ones, INSERT_VALUES, boundary_mask));
74     PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
75     PetscCall(DMRestoreGlobalVector(dm, &ones));
76   }
77   PetscFunctionReturn(PETSC_SUCCESS);
78 }
79 
80 PetscErrorCode DMPlexInsertBoundaryValues_FromICs(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
81                                                   Vec grad_FVM) {
82   Vec Qbc, boundary_mask;
83 
84   PetscFunctionBeginUser;
85   // Mask (zero) Strong BC entries
86   PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
87   PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
88   PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
89 
90   PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
91   PetscCall(VecAXPY(Q_loc, 1., Qbc));
92   PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
93   PetscFunctionReturn(PETSC_SUCCESS);
94 }
95 
96 static PetscErrorCode BinaryReadIntoInt(PetscViewer viewer, PetscInt *out, PetscDataType file_type) {
97   PetscFunctionBeginUser;
98   *out = -13;  // appease the overzealous GCC compiler warning Gods
99   if (file_type == PETSC_INT32) {
100     PetscInt32 val;
101     PetscCall(PetscViewerBinaryRead(viewer, &val, 1, NULL, PETSC_INT32));
102     *out = val;
103   } else if (file_type == PETSC_INT64) {
104     PetscInt64 val;
105     PetscCall(PetscViewerBinaryRead(viewer, &val, 1, NULL, PETSC_INT64));
106     *out = val;
107   } else {
108     PetscCall(PetscViewerBinaryRead(viewer, out, 1, NULL, PETSC_INT));
109   }
110   PetscFunctionReturn(PETSC_SUCCESS);
111 }
112 
113 // @brief Load vector from binary file, possibly with embedded solution time and step number
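//
// New-style files start with a header: a file token (FLUIDS_FILE_TOKEN_32, FLUIDS_FILE_TOKEN_64, or the legacy
// FLUIDS_FILE_TOKEN), the step number (32- or 64-bit per the token), and the solution time, followed by a standard
// PETSc binary Vec.  Legacy files contain only the PETSc binary Vec.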
114 PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
115   PetscInt      file_step_number;
116   PetscInt32    token;
117   PetscReal     file_time;
118   PetscDataType file_type = PETSC_INT32;
119 
120   PetscFunctionBeginUser;
121   PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT32));
122   if (token == FLUIDS_FILE_TOKEN_32 || token == FLUIDS_FILE_TOKEN_64 ||
123       token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
124     if (token == FLUIDS_FILE_TOKEN_32) file_type = PETSC_INT32;
125     else if (token == FLUIDS_FILE_TOKEN_64) file_type = PETSC_INT64;
126     PetscCall(BinaryReadIntoInt(viewer, &file_step_number, file_type));
127     PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
128     if (time) *time = file_time;
129     if (step_number) *step_number = file_step_number;
130   } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, data]
131     PetscInt length, N;
132     PetscCall(BinaryReadIntoInt(viewer, &length, file_type));
133     PetscCall(VecGetSize(Q, &N));
134     PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
135     PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
136   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");
137 
138   PetscCall(VecLoad(Q, viewer));
139   PetscFunctionReturn(PETSC_SUCCESS);
140 }
141 
142 // Compare reference solution values with current test run for CI
143 PetscErrorCode RegressionTest(AppCtx app_ctx, Vec Q) {
144   Vec         Q_ref;
145   PetscViewer viewer;
146   PetscReal   error, norm_Q, norm_Q_ref;
147   MPI_Comm    comm = PetscObjectComm((PetscObject)Q);
148 
149   PetscFunctionBeginUser;
150   // Read reference file
151   PetscCall(VecDuplicate(Q, &Q_ref));
152   PetscCheck(strcmp(app_ctx->test_file_path, "") != 0, comm, PETSC_ERR_FILE_READ, "File for regression test not given");
153   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
154   PetscCall(LoadFluidsBinaryVec(comm, viewer, Q_ref, NULL, NULL));
155 
156   // Compute error with respect to reference solution
157   PetscCall(VecNorm(Q, NORM_MAX, &norm_Q));
158   PetscCall(VecNorm(Q_ref, NORM_MAX, &norm_Q_ref));
159   PetscCall(VecAXPY(Q, -1.0, Q_ref));
160   PetscCall(VecScale(Q, 1. / norm_Q_ref));
161   PetscCall(VecNorm(Q, NORM_MAX, &error));
162 
163   // Check error
164   if (error > app_ctx->test_tol) {
165     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\nReference solution max norm: %g, Computed solution max norm: %g\n",
166                           (double)error, (double)norm_Q_ref, (double)norm_Q));
167   }
168 
169   // Cleanup
170   PetscCall(PetscViewerDestroy(&viewer));
171   PetscCall(VecDestroy(&Q_ref));
172   PetscFunctionReturn(PETSC_SUCCESS);
173 }
174 
175 // Get error for problems with exact solutions
176 PetscErrorCode PrintError(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
177   PetscInt  loc_nodes;
178   Vec       Q_exact, Q_exact_loc;
179   PetscReal rel_error, norm_error, norm_exact;
180 
181   PetscFunctionBeginUser;
182   // Get exact solution at final time
183   PetscCall(DMGetGlobalVector(dm, &Q_exact));
184   PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
185   PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
186   PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));
187 
188   // Get |exact solution - obtained solution|
189   PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
190   PetscCall(VecAXPY(Q, -1.0, Q_exact));
191   PetscCall(VecNorm(Q, NORM_1, &norm_error));
192 
193   rel_error = norm_error / norm_exact;
194   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
195   PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
196   PetscCall(DMRestoreGlobalVector(dm, &Q_exact));
197   PetscFunctionReturn(PETSC_SUCCESS);
198 }
199 
200 // Post-processing
201 PetscErrorCode PostProcess(TS ts, CeedData ceed_data, DM dm, ProblemData problem, User user, Vec Q, PetscScalar final_time) {
202   PetscInt          steps;
203   TSConvergedReason reason;
204 
205   PetscFunctionBeginUser;
206   // Print relative error
207   if (problem->compute_exact_solution_error && user->app_ctx->test_type == TESTTYPE_NONE) {
208     PetscCall(PrintError(ceed_data, dm, user, Q, final_time));
209   }
210 
211   // Print final time and number of steps
212   PetscCall(TSGetStepNumber(ts, &steps));
213   PetscCall(TSGetConvergedReason(ts, &reason));
214   if (user->app_ctx->test_type == TESTTYPE_NONE) {
215     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
216                           steps, (double)final_time));
217   }
218 
219   // Output numerical values from command line
220   PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));
221 
222   // Compare reference solution values with current test run for CI
223   if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
224     PetscCall(RegressionTest(user->app_ctx, Q));
225   }
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 
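// File header tokens for LoadFluidsBinaryVec; the _32/_64 suffixes record whether the step number in the header is
// stored as a 32- or 64-bit integer.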
229 const PetscInt32 FLUIDS_FILE_TOKEN    = 0xceedf00;  // for backwards compatibility
230 const PetscInt32 FLUIDS_FILE_TOKEN_32 = 0xceedf32;
231 const PetscInt32 FLUIDS_FILE_TOKEN_64 = 0xceedf64;
232 
233 // Gather initial Q values in case of continuation of simulation
234 PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
235   PetscViewer viewer;
236 
237   PetscFunctionBeginUser;
238   PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
239   PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
240   PetscCall(PetscViewerDestroy(&viewer));
241   PetscFunctionReturn(PETSC_SUCCESS);
242 }
243 
244 // Free a plain data context that was allocated using PETSc; returning libCEED error codes
245 int FreeContextPetsc(void *data) {
246   if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
247   return CEED_ERROR_SUCCESS;
248 }
249 
250 // Return mass qfunction specification for number of components N
251 PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
252   PetscFunctionBeginUser;
253   switch (N) {
254     case 1:
255       PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_1, Mass_1_loc, qf));
256       break;
257     case 5:
258       PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_5, Mass_5_loc, qf));
259       break;
260     case 7:
261       PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_7, Mass_7_loc, qf));
262       break;
263     case 9:
264       PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_9, Mass_9_loc, qf));
265       break;
266     case 22:
267       PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_22, Mass_22_loc, qf));
268       break;
269     default:
270       SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Could not find mass qfunction of size %" CeedInt_FMT, N);
271   }
272 
273   PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP));
274   PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE));
275   PetscCallCeed(ceed, CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP));
276   PetscCallCeed(ceed, CeedQFunctionSetUserFlopsEstimate(*qf, N));
277   PetscFunctionReturn(PETSC_SUCCESS);
278 }
279 
280 PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
281   PetscFunctionBeginUser;
282   if (context == NULL) PetscFunctionReturn(PETSC_SUCCESS);
283 
284   PetscCall(DMDestroy(&context->dm));
285   PetscCall(KSPDestroy(&context->ksp));
286 
287   PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));
288 
289   PetscCall(PetscFree(context));
290   PetscFunctionReturn(PETSC_SUCCESS);
291 }
292 
293 /**
294  * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
295  *
296  * This function opens the file specified by `path` using `PetscFOpen` and passes the file pointer in `fp`.
297  * It is not closed in this function, thus `fp` must be closed sometime after this function has been called (using `PetscFClose` for example).
298  *
299  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
300  *
301  * @param[in]  comm           MPI_Comm for the program
302  * @param[in]  path           Path to the file
303  * @param[in]  char_array_len Length of the character array that should contain each line
304  * @param[out] dims           Dimensions of the file, taken from the first line of the file
305  * @param[out] fp             File pointer to the opened file
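 *
 * For example, the first lines of a 3-row, 2-column *.dat file might read (values are illustrative):
 *
 *     3 2
 *     1.0 0.5
 *     2.0 0.7
 *     3.0 0.9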
306  */
307 PetscErrorCode PhastaDatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
308                                  FILE **fp) {
309   int    ndims;
310   char   line[char_array_len];
311   char **array;
312 
313   PetscFunctionBeginUser;
314   PetscCall(PetscFOpen(comm, path, "r", fp));
315   PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
316   PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
317   PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %d dimensions instead of 2 on the first line of %s", ndims, path);
318 
319   for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
320   PetscCall(PetscStrToArrayDestroy(ndims, array));
321   PetscFunctionReturn(PETSC_SUCCESS);
322 }
323 
324 /**
325  * @brief Get the number of rows for the PHASTA file at path.
326  *
327  * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
328  *
329  * @param[in]  comm  MPI_Comm for the program
330  * @param[in]  path  Path to the file
331  * @param[out] nrows Number of rows
332  */
333 PetscErrorCode PhastaDatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
334   const PetscInt char_array_len = 512;
335   PetscInt       dims[2];
336   FILE          *fp;
337 
338   PetscFunctionBeginUser;
339   PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));
340   *nrows = dims[0];
341   PetscCall(PetscFClose(comm, fp));
342   PetscFunctionReturn(PETSC_SUCCESS);
343 }
344 
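// Read the PHASTA *.dat file at `path` into `array`, stored row-major with the dimensions given on the file's first
// line; `array` must hold at least rows*columns entries.
//
// A minimal usage sketch (variable names and the column count are illustrative):
//
//   PetscInt   nrows;
//   PetscReal *data;
//   PetscCall(PhastaDatFileGetNRows(comm, path, &nrows));
//   PetscCall(PetscMalloc1(nrows * ncols, &data));  // ncols known from the problem setup
//   PetscCall(PhastaDatFileReadToArrayReal(comm, path, data));
//   ...
//   PetscCall(PetscFree(data));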
345 PetscErrorCode PhastaDatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
346   PetscInt       dims[2];
347   FILE          *fp;
348   const PetscInt char_array_len = 512;
349   char           line[char_array_len];
350 
351   PetscFunctionBeginUser;
352   PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));
353 
354   for (PetscInt i = 0; i < dims[0]; i++) {
355     int    ndims;
356     char **row_array;
357 
358     PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
359     PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
360     PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
361                "Line %" PetscInt_FMT " of %s does not contain the expected number of columns (%d instead of %" PetscInt_FMT ")", i, path, ndims, dims[1]);
362 
363     for (PetscInt j = 0; j < dims[1]; j++) array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
364     PetscCall(PetscStrToArrayDestroy(ndims, row_array));
365   }
366 
367   PetscCall(PetscFClose(comm, fp));
368   PetscFunctionReturn(PETSC_SUCCESS);
369 }
370 
371 // Print information about the given simulation run
372 PetscErrorCode PrintRunInfo(User user, Physics phys_ctx, ProblemData problem, TS ts) {
373   Ceed     ceed = user->ceed;
374   MPI_Comm comm = PetscObjectComm((PetscObject)ts);
375 
376   PetscFunctionBeginUser;
377   // Header and rank
378   char        host_name[PETSC_MAX_PATH_LEN];
379   PetscMPIInt rank, comm_size;
380   PetscCall(PetscGetHostName(host_name, sizeof host_name));
381   PetscCallMPI(MPI_Comm_rank(comm, &rank));
382   PetscCallMPI(MPI_Comm_size(comm, &comm_size));
383   PetscCall(PetscPrintf(comm,
384                         "\n-- Navier-Stokes solver - libCEED + PETSc --\n"
385                         "  MPI:\n"
386                         "    Host Name                          : %s\n"
387                         "    Total ranks                        : %d\n",
388                         host_name, comm_size));
389 
390   // Problem specific info
391   PetscCall(problem->print_info(user, problem, user->app_ctx));
392 
393   // libCEED
394   const char *used_resource;
395   CeedMemType mem_type_backend;
396   PetscCallCeed(ceed, CeedGetResource(user->ceed, &used_resource));
397   PetscCallCeed(ceed, CeedGetPreferredMemType(user->ceed, &mem_type_backend));
398   PetscCall(PetscPrintf(comm,
399                         "  libCEED:\n"
400                         "    libCEED Backend                    : %s\n"
401                         "    libCEED Backend MemType            : %s\n",
402                         used_resource, CeedMemTypes[mem_type_backend]));
403   // PETSc
404   VecType vec_type;
405   char    box_faces_str[PETSC_MAX_PATH_LEN] = "3,3,3";
406   if (problem->dim == 2) box_faces_str[3] = '\0';
407   PetscCall(PetscOptionsGetString(NULL, NULL, "-dm_plex_box_faces", box_faces_str, sizeof(box_faces_str), NULL));
408   PetscCall(DMGetVecType(user->dm, &vec_type));
409   PetscCall(PetscPrintf(comm,
410                         "  PETSc:\n"
411                         "    Box Faces                          : %s\n"
412                         "    DM VecType                         : %s\n"
413                         "    Time Stepping Scheme               : %s\n",
414                         box_faces_str, vec_type, phys_ctx->implicit ? "implicit" : "explicit"));
415   {
416     char           pmat_type_str[PETSC_MAX_PATH_LEN];
417     MatType        amat_type, pmat_type;
418     Mat            Amat, Pmat;
419     TSIJacobianFn *ijacob_function;
420 
421     PetscCall(TSGetIJacobian(ts, &Amat, &Pmat, &ijacob_function, NULL));
422     PetscCall(MatGetType(Amat, &amat_type));
423     PetscCall(MatGetType(Pmat, &pmat_type));
424 
425     PetscCall(PetscStrncpy(pmat_type_str, pmat_type, sizeof(pmat_type_str)));
426     if (!strcmp(pmat_type, MATCEED)) {
427       MatType pmat_coo_type;
428       char    pmat_coo_type_str[PETSC_MAX_PATH_LEN];
429 
430       PetscCall(MatCeedGetCOOMatType(Pmat, &pmat_coo_type));
431       PetscCall(PetscSNPrintf(pmat_coo_type_str, sizeof(pmat_coo_type_str), " (COO MatType: %s)", pmat_coo_type));
432       PetscCall(PetscStrlcat(pmat_type_str, pmat_coo_type_str, sizeof(pmat_type_str)));
433     }
434     if (ijacob_function) {
435       PetscCall(PetscPrintf(comm,
436                             "    IJacobian A MatType                : %s\n"
437                             "    IJacobian P MatType                : %s\n",
438                             amat_type, pmat_type_str));
439     }
440   }
441   if (user->app_ctx->cont_steps) {
442     PetscCall(PetscPrintf(comm,
443                           "  Continue:\n"
444                           "    Filename                           : %s\n"
445                           "    Step                               : %" PetscInt_FMT "\n"
446                           "    Time                               : %g\n",
447                           user->app_ctx->cont_file, user->app_ctx->cont_steps, user->app_ctx->cont_time));
448   }
449   // Mesh
450   const PetscInt num_comp_q = 5;
451   PetscInt       glob_dofs, owned_dofs, local_dofs;
452   const CeedInt  num_P = user->app_ctx->degree + 1, num_Q = num_P + user->app_ctx->q_extra;
453   PetscCall(DMGetGlobalVectorInfo(user->dm, &owned_dofs, &glob_dofs, NULL));
454   PetscCall(DMGetLocalVectorInfo(user->dm, &local_dofs, NULL, NULL));
455   PetscCall(PetscPrintf(comm,
456                         "  Mesh:\n"
457                         "    Number of 1D Basis Nodes (P)       : %" CeedInt_FMT "\n"
458                         "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
459                         "    Global DoFs                        : %" PetscInt_FMT "\n"
460                         "    DoFs per node                      : %" PetscInt_FMT "\n"
461                         "    Global %" PetscInt_FMT "-DoF nodes                 : %" PetscInt_FMT "\n",
462                         num_P, num_Q, glob_dofs, num_comp_q, num_comp_q, glob_dofs / num_comp_q));
463   // -- Get Partition Statistics
464   PetscCall(PetscPrintf(comm, "  Partition:                             (min,max,median,max/median)\n"));
465   {
466     PetscInt *gather_buffer = NULL;
467     PetscInt  part_owned_dofs[3], part_local_dofs[3], part_boundary_dofs[3], part_neighbors[3];
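    // Index of the median entry once gather_buffer is sorted (lower median when comm_size is even)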
468     PetscInt  median_index = comm_size % 2 ? comm_size / 2 : comm_size / 2 - 1;
469     if (!rank) PetscCall(PetscMalloc1(comm_size, &gather_buffer));
470 
471     PetscCallMPI(MPI_Gather(&owned_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
472     if (!rank) {
473       PetscCall(PetscSortInt(comm_size, gather_buffer));
474       part_owned_dofs[0]             = gather_buffer[0];              // min
475       part_owned_dofs[1]             = gather_buffer[comm_size - 1];  // max
476       part_owned_dofs[2]             = gather_buffer[median_index];   // median
477       PetscReal part_owned_dof_ratio = (PetscReal)part_owned_dofs[1] / (PetscReal)part_owned_dofs[2];
478       PetscCall(PetscPrintf(
479           comm, "    Global Vector %" PetscInt_FMT "-DoF nodes          : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
480           part_owned_dofs[0] / num_comp_q, part_owned_dofs[1] / num_comp_q, part_owned_dofs[2] / num_comp_q, part_owned_dof_ratio));
481     }
482 
483     PetscCallMPI(MPI_Gather(&local_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
484     if (!rank) {
485       PetscCall(PetscSortInt(comm_size, gather_buffer));
486       part_local_dofs[0]             = gather_buffer[0];              // min
487       part_local_dofs[1]             = gather_buffer[comm_size - 1];  // max
488       part_local_dofs[2]             = gather_buffer[median_index];   // median
489       PetscReal part_local_dof_ratio = (PetscReal)part_local_dofs[1] / (PetscReal)part_local_dofs[2];
490       PetscCall(PetscPrintf(
491           comm, "    Local Vector %" PetscInt_FMT "-DoF nodes           : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
492           part_local_dofs[0] / num_comp_q, part_local_dofs[1] / num_comp_q, part_local_dofs[2] / num_comp_q, part_local_dof_ratio));
493     }
494 
495     if (comm_size != 1) {
496       PetscInt num_remote_roots_total = 0, num_remote_leaves_total = 0, num_ghost_interface_ranks = 0, num_owned_interface_ranks = 0;
497       {
498         PetscSF            sf;
499         PetscMPIInt        nrranks, niranks;
500         const PetscInt    *roffset, *rmine, *rremote, *ioffset, *irootloc;
501         const PetscMPIInt *rranks, *iranks;
502 
503         PetscCall(DMGetSectionSF(user->dm, &sf));
504         PetscCall(PetscSFGetRootRanks(sf, &nrranks, &rranks, &roffset, &rmine, &rremote));
505         PetscCall(PetscSFGetLeafRanks(sf, &niranks, &iranks, &ioffset, &irootloc));
506         for (PetscInt i = 0; i < nrranks; i++) {
507           if (rranks[i] == rank) continue;  // Ignore same-part global->local transfers
508           num_remote_roots_total += roffset[i + 1] - roffset[i];
509           num_ghost_interface_ranks++;
510         }
511         for (PetscInt i = 0; i < niranks; i++) {
512           if (iranks[i] == rank) continue;
513           num_remote_leaves_total += ioffset[i + 1] - ioffset[i];
514           num_owned_interface_ranks++;
515         }
516       }
517       PetscCallMPI(MPI_Gather(&num_remote_roots_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
518       if (!rank) {
519         PetscCall(PetscSortInt(comm_size, gather_buffer));
520         part_boundary_dofs[0]           = gather_buffer[0];              // min
521         part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
522         part_boundary_dofs[2]           = gather_buffer[median_index];   // median
523         PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
524         PetscCall(PetscPrintf(
525             comm, "    Ghost Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
526             num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
527             part_shared_dof_ratio));
528       }
529 
530       PetscCallMPI(MPI_Gather(&num_ghost_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
531       if (!rank) {
532         PetscCall(PetscSortInt(comm_size, gather_buffer));
533         part_neighbors[0]              = gather_buffer[0];              // min
534         part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
535         part_neighbors[2]              = gather_buffer[median_index];   // median
536         PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
537         PetscCall(PetscPrintf(comm, "    Ghost Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
538                               part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
539       }
540 
541       PetscCallMPI(MPI_Gather(&num_remote_leaves_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
542       if (!rank) {
543         PetscCall(PetscSortInt(comm_size, gather_buffer));
544         part_boundary_dofs[0]           = gather_buffer[0];              // min
545         part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
546         part_boundary_dofs[2]           = gather_buffer[median_index];   // median
547         PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
548         PetscCall(PetscPrintf(
549             comm, "    Owned Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
550             num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
551             part_shared_dof_ratio));
552       }
553 
554       PetscCallMPI(MPI_Gather(&num_owned_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
555       if (!rank) {
556         PetscCall(PetscSortInt(comm_size, gather_buffer));
557         part_neighbors[0]              = gather_buffer[0];              // min
558         part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
559         part_neighbors[2]              = gather_buffer[median_index];   // median
560         PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
561         PetscCall(PetscPrintf(comm, "    Owned Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
562                               part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
563       }
564     }
565 
566     if (!rank) PetscCall(PetscFree(gather_buffer));
567   }
568   PetscFunctionReturn(PETSC_SUCCESS);
569 }
570