// Copyright (c) 2017-2024, Lawrence Livermore National Security, LLC and other CEED contributors.
// All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
//
// SPDX-License-Identifier: BSD-2-Clause
//
// This file is part of CEED:  http://github.com/ceed

/// @file
/// Miscellaneous utility functions

#include <ceed.h>
#include <petscdm.h>
#include <petscsf.h>
#include <petscts.h>

#include "../navierstokes.h"
#include "../qfunctions/mass.h"

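// Evaluate the initial conditions with the ICs CeedOperator and divide by node multiplicity so that values at DoFs shared between elements are averaged rather than summed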
PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
  Ceed         ceed = user->ceed;
  CeedVector   mult_vec;
  PetscMemType m_mem_type;
  Vec          Multiplicity, Multiplicity_loc;

  PetscFunctionBeginUser;
  if (user->phys->ics_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time));
  PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));

  PetscCallCeed(ceed, CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL));

  // -- Get multiplicity
  PetscCall(DMGetLocalVector(dm, &Multiplicity_loc));
  PetscCall(VecPetscToCeed(Multiplicity_loc, &m_mem_type, mult_vec));
  PetscCallCeed(ceed, CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec));
  PetscCall(VecCeedToPetsc(mult_vec, m_mem_type, Multiplicity_loc));

  PetscCall(DMGetGlobalVector(dm, &Multiplicity));
  PetscCall(VecZeroEntries(Multiplicity));
  PetscCall(DMLocalToGlobal(dm, Multiplicity_loc, ADD_VALUES, Multiplicity));

  // -- Fix multiplicity
  PetscCall(VecPointwiseDivide(Q, Q, Multiplicity));
  PetscCall(VecPointwiseDivide(Q_loc, Q_loc, Multiplicity_loc));

  PetscCall(DMRestoreLocalVector(dm, &Multiplicity_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Multiplicity));
  PetscCallCeed(ceed, CeedVectorDestroy(&mult_vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Record boundary values from initial condition
PetscErrorCode SetBCsFromICs(DM dm, Vec Q, Vec Q_loc) {
  PetscFunctionBeginUser;
  {  // Capture initial condition values in Qbc
    Vec Qbc;

    PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
    PetscCall(VecCopy(Q_loc, Qbc));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
    PetscCall(VecAXPY(Qbc, -1., Q_loc));
    PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_FromICs));

  {  // Set boundary mask to zero out essential BCs
    Vec boundary_mask, ones;

    PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMGetGlobalVector(dm, &ones));
    PetscCall(VecZeroEntries(boundary_mask));
    PetscCall(VecSet(ones, 1.0));
    PetscCall(DMGlobalToLocal(dm, ones, INSERT_VALUES, boundary_mask));
    PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMRestoreGlobalVector(dm, &ones));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

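// Boundary-value insertion callback composed on the DM by SetBCsFromICs: zero the essential BC entries using the stored boundary mask, then add the values captured in Qbc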
PetscErrorCode DMPlexInsertBoundaryValues_FromICs(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
                                                  Vec grad_FVM) {
  Vec Qbc, boundary_mask;

  PetscFunctionBeginUser;
  // Mask (zero) Strong BC entries
  PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
  PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));

  PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
  PetscCall(VecAXPY(Q_loc, 1., Qbc));
  PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

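// Read a single integer stored on disk as the given type (32-bit, 64-bit, or native PetscInt) into a PetscInt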
static PetscErrorCode BinaryReadIntoInt(PetscViewer viewer, PetscInt *out, PetscDataType file_type) {
  PetscFunctionBeginUser;
  if (file_type == PETSC_INT32) {
    PetscInt32 val;
    PetscCall(PetscViewerBinaryRead(viewer, &val, 1, NULL, PETSC_INT32));
    *out = val;
  } else if (file_type == PETSC_INT64) {
    PetscInt64 val;
    PetscCall(PetscViewerBinaryRead(viewer, &val, 1, NULL, PETSC_INT64));
    *out = val;
  } else {
    PetscCall(PetscViewerBinaryRead(viewer, out, 1, NULL, PETSC_INT));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// @brief Load vector from binary file, possibly with embedded solution time and step number
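// New-style files begin with a FLUIDS_FILE_TOKEN_32/_64 (or legacy FLUIDS_FILE_TOKEN) header followed by the step number and solution time; legacy files contain only the PETSc Vec (VEC_FILE_CLASSID header)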
PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
  PetscInt      file_step_number;
  PetscInt32    token;
  PetscReal     file_time;
  PetscDataType file_type = PETSC_INT32;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT32));
  if (token == FLUIDS_FILE_TOKEN_32 || token == FLUIDS_FILE_TOKEN_64 ||
      token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
    if (token == FLUIDS_FILE_TOKEN_32) file_type = PETSC_INT32;
    else if (token == FLUIDS_FILE_TOKEN_64) file_type = PETSC_INT64;
    PetscCall(BinaryReadIntoInt(viewer, &file_step_number, file_type));
    PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
    if (time) *time = file_time;
    if (step_number) *step_number = file_step_number;
  } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, data]
    PetscInt length, N;
    PetscCall(BinaryReadIntoInt(viewer, &length, file_type));
    PetscCall(VecGetSize(Q, &N));
    PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
    PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
  } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");

  PetscCall(VecLoad(Q, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Compare reference solution values with current test run for CI
PetscErrorCode RegressionTest(AppCtx app_ctx, Vec Q) {
  Vec         Qref;
  PetscViewer viewer;
  PetscReal   error, Qrefnorm;
  MPI_Comm    comm = PetscObjectComm((PetscObject)Q);

  PetscFunctionBeginUser;
  // Read reference file
  PetscCall(VecDuplicate(Q, &Qref));
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
  PetscCall(LoadFluidsBinaryVec(comm, viewer, Qref, NULL, NULL));

  // Compute error with respect to reference solution
156   PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
157   PetscCall(VecScale(Q, 1. / Qrefnorm));
158   PetscCall(VecNorm(Q, NORM_MAX, &error));
159 
160   // Check error
161   if (error > app_ctx->test_tol) {
162     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
163   }
164 
165   // Cleanup
166   PetscCall(PetscViewerDestroy(&viewer));
167   PetscCall(VecDestroy(&Qref));
168   PetscFunctionReturn(PETSC_SUCCESS);
169 }
170 
171 // Get error for problems with exact solutions
172 PetscErrorCode PrintError(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
173   PetscInt  loc_nodes;
174   Vec       Q_exact, Q_exact_loc;
175   PetscReal rel_error, norm_error, norm_exact;
176 
177   PetscFunctionBeginUser;
178   // Get exact solution at final time
179   PetscCall(DMGetGlobalVector(dm, &Q_exact));
180   PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
181   PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
182   PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));
183 
184   // Get |exact solution - obtained solution|
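  // Relative L1 error: ||Q - Q_exact||_1 / ||Q_exact||_1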
  PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
  PetscCall(VecAXPY(Q, -1.0, Q_exact));
  PetscCall(VecNorm(Q, NORM_1, &norm_error));

  rel_error = norm_error / norm_exact;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
  PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Q_exact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Post-processing
PetscErrorCode PostProcess(TS ts, CeedData ceed_data, DM dm, ProblemData problem, User user, Vec Q, PetscScalar final_time) {
  PetscInt          steps;
  TSConvergedReason reason;

  PetscFunctionBeginUser;
  // Print relative error
  if (problem->non_zero_time && user->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PrintError(ceed_data, dm, user, Q, final_time));
  }

  // Print final time and number of steps
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  if (user->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
                          steps, (double)final_time));
  }

  // Output numerical values from command line
  PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));

  // Compare reference solution values with current test run for CI
  if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
    PetscCall(RegressionTest(user->app_ctx, Q));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

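// Magic header tokens identifying fluids restart files; the _32/_64 variants indicate whether the step number is stored as a 32- or 64-bit integer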
const PetscInt32 FLUIDS_FILE_TOKEN    = 0xceedf00;  // for backwards compatibility
const PetscInt32 FLUIDS_FILE_TOKEN_32 = 0xceedf32;
const PetscInt32 FLUIDS_FILE_TOKEN_64 = 0xceedf64;

// Load initial Q values from a binary file when continuing a simulation
PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
  PetscViewer viewer;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
  PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Free a plain data context that was allocated using PETSc, returning libCEED error codes
int FreeContextPetsc(void *data) {
  if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
  return CEED_ERROR_SUCCESS;
}

// Create a mass QFunction and set its fields for N components
PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
  PetscFunctionBeginUser;
  switch (N) {
    case 1:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_1, Mass_1_loc, qf));
      break;
    case 5:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_5, Mass_5_loc, qf));
      break;
    case 7:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_7, Mass_7_loc, qf));
      break;
    case 9:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_9, Mass_9_loc, qf));
      break;
    case 22:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_22, Mass_22_loc, qf));
      break;
    default:
      SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Could not find mass qfunction of size %d", N);
  }

  PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP));
  PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE));
  PetscCallCeed(ceed, CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP));
  PetscCallCeed(ceed, CeedQFunctionSetUserFlopsEstimate(*qf, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}
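
/* A minimal usage sketch for wiring the mass QFunction into a CeedOperator (the element
   restriction, basis, and qdata names below are placeholders, not identifiers defined in
   this file):

     CeedQFunction qf_mass;
     CeedOperator  op_mass;
     PetscCall(CreateMassQFunction(ceed, 5, q_data_size, &qf_mass));
     PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, &op_mass));
     PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", elem_restr_q, basis_q, CEED_VECTOR_ACTIVE));
     PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", elem_restr_qd_i, CEED_BASIS_NONE, q_data));
     PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", elem_restr_q, basis_q, CEED_VECTOR_ACTIVE));
*/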

// Destroy a NodalProjectionData context and the objects it owns (DM, KSP, and L2 RHS operator apply context)
PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
  PetscFunctionBeginUser;
  if (context == NULL) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(DMDestroy(&context->dm));
  PetscCall(KSPDestroy(&context->ksp));

  PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));

  PetscCall(PetscFree(context));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
 * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
 *
 * This function opens the file specified by `path` using `PetscFOpen` and returns the file pointer in `fp`.
 * The file is not closed here, so `fp` must be closed sometime after this function has been called (using `PetscFClose`, for example).
 *
 * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
 *
 * @param[in]  comm           MPI_Comm for the program
 * @param[in]  path           Path to the file
 * @param[in]  char_array_len Length of the character array that should contain each line
 * @param[out] dims           Dimensions of the file, taken from the first line of the file
 * @param[out] fp             File pointer to the opened file
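 *
 * For example, a file with 100 rows and 4 columns would begin with the line "100 4" (hypothetical values for illustration).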
 */
PetscErrorCode PhastaDatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
                                 FILE **fp) {
  int    ndims;
  char   line[char_array_len];
  char **array;

  PetscFunctionBeginUser;
  PetscCall(PetscFOpen(comm, path, "r", fp));
  PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
  PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
  PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %d dimensions instead of 2 on the first line of %s", ndims, path);

  for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
  PetscCall(PetscStrToArrayDestroy(ndims, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
 * @brief Get the number of rows for the PHASTA file at path.
 *
 * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
 *
 * @param[in]  comm  MPI_Comm for the program
 * @param[in]  path  Path to the file
 * @param[out] nrows Number of rows
 */
PetscErrorCode PhastaDatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
  const PetscInt char_array_len = 512;
  PetscInt       dims[2];
  FILE          *fp;

  PetscFunctionBeginUser;
  PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));
  *nrows = dims[0];
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

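// Read the contents of the PHASTA *.dat file at path into a row-major array sized rows x columns, as given by the file's first line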
PetscErrorCode PhastaDatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
  PetscInt       dims[2];
  int            ndims;
  FILE          *fp;
  const PetscInt char_array_len = 512;
  char           line[char_array_len];
  char         **row_array;

  PetscFunctionBeginUser;
  PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));

  for (PetscInt i = 0; i < dims[0]; i++) {
    PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
    PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
    PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
               "Line %" PetscInt_FMT " of %s does not contain the expected number of columns (%d instead of %" PetscInt_FMT ")", i, path, ndims, dims[1]);

    for (PetscInt j = 0; j < dims[1]; j++) {
      array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
    }
  }

  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

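// Log events and class IDs used to profile libCEED operator application, online training (SmartRedis), and miscellaneous solver stages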
PetscLogEvent       FLUIDS_CeedOperatorApply;
PetscLogEvent       FLUIDS_SmartRedis_Init;
PetscLogEvent       FLUIDS_SmartRedis_Meta;
PetscLogEvent       FLUIDS_SmartRedis_Train;
PetscLogEvent       FLUIDS_TrainDataCompute;
PetscLogEvent       FLUIDS_DifferentialFilter;
PetscLogEvent       FLUIDS_VelocityGradientProjection;
static PetscClassId libCEED_classid, onlineTrain_classid, misc_classid;

PetscErrorCode RegisterLogEvents() {
  PetscFunctionBeginUser;
  PetscCall(PetscClassIdRegister("libCEED", &libCEED_classid));
  PetscCall(PetscLogEventRegister("CeedOpApply", libCEED_classid, &FLUIDS_CeedOperatorApply));

  PetscCall(PetscClassIdRegister("onlineTrain", &onlineTrain_classid));
  PetscCall(PetscLogEventRegister("SmartRedis_Init", onlineTrain_classid, &FLUIDS_SmartRedis_Init));
  PetscCall(PetscLogEventRegister("SmartRedis_Meta", onlineTrain_classid, &FLUIDS_SmartRedis_Meta));
  PetscCall(PetscLogEventRegister("SmartRedis_Train", onlineTrain_classid, &FLUIDS_SmartRedis_Train));
  PetscCall(PetscLogEventRegister("TrainDataCompute", onlineTrain_classid, &FLUIDS_TrainDataCompute));

  PetscCall(PetscClassIdRegister("Miscellaneous", &misc_classid));
  PetscCall(PetscLogEventRegister("DiffFilter", misc_classid, &FLUIDS_DifferentialFilter));
  PetscCall(PetscLogEventRegister("VeloGradProj", misc_classid, &FLUIDS_VelocityGradientProjection));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Print information about the given simulation run
PetscErrorCode PrintRunInfo(User user, Physics phys_ctx, ProblemData problem, TS ts) {
  Ceed     ceed = user->ceed;
  MPI_Comm comm = PetscObjectComm((PetscObject)ts);

  PetscFunctionBeginUser;
  // Header and rank
  char        host_name[PETSC_MAX_PATH_LEN];
  PetscMPIInt rank, comm_size;
  PetscCall(PetscGetHostName(host_name, sizeof host_name));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &comm_size));
  PetscCall(PetscPrintf(comm,
                        "\n-- Navier-Stokes solver - libCEED + PETSc --\n"
                        "  MPI:\n"
                        "    Host Name                          : %s\n"
                        "    Total ranks                        : %d\n",
                        host_name, comm_size));

  // Problem specific info
  PetscCall(problem->print_info(user, problem, user->app_ctx));

  // libCEED
  const char *used_resource;
  CeedMemType mem_type_backend;
  PetscCallCeed(ceed, CeedGetResource(user->ceed, &used_resource));
  PetscCallCeed(ceed, CeedGetPreferredMemType(user->ceed, &mem_type_backend));
  PetscCall(PetscPrintf(comm,
                        "  libCEED:\n"
                        "    libCEED Backend                    : %s\n"
                        "    libCEED Backend MemType            : %s\n",
                        used_resource, CeedMemTypes[mem_type_backend]));
  // PETSc
  VecType vec_type;
  char    box_faces_str[PETSC_MAX_PATH_LEN] = "3,3,3";
  if (problem->dim == 2) box_faces_str[3] = '\0';
  PetscCall(PetscOptionsGetString(NULL, NULL, "-dm_plex_box_faces", box_faces_str, sizeof(box_faces_str), NULL));
  PetscCall(DMGetVecType(user->dm, &vec_type));
  PetscCall(PetscPrintf(comm,
                        "  PETSc:\n"
                        "    Box Faces                          : %s\n"
                        "    DM VecType                         : %s\n"
                        "    Time Stepping Scheme               : %s\n",
                        box_faces_str, vec_type, phys_ctx->implicit ? "implicit" : "explicit"));
  {
    char           pmat_type_str[PETSC_MAX_PATH_LEN];
    MatType        amat_type, pmat_type;
    Mat            Amat, Pmat;
    TSIJacobianFn *ijacob_function;

    PetscCall(TSGetIJacobian(ts, &Amat, &Pmat, &ijacob_function, NULL));
    PetscCall(MatGetType(Amat, &amat_type));
    PetscCall(MatGetType(Pmat, &pmat_type));

    PetscCall(PetscStrncpy(pmat_type_str, pmat_type, sizeof(pmat_type_str)));
    if (!strcmp(pmat_type, MATCEED)) {
      MatType pmat_coo_type;
      char    pmat_coo_type_str[PETSC_MAX_PATH_LEN];

      PetscCall(MatCeedGetCOOMatType(Pmat, &pmat_coo_type));
      PetscCall(PetscSNPrintf(pmat_coo_type_str, sizeof(pmat_coo_type_str), " (COO MatType: %s)", pmat_coo_type));
      PetscCall(PetscStrlcat(pmat_type_str, pmat_coo_type_str, sizeof(pmat_type_str)));
    }
    if (ijacob_function) {
      PetscCall(PetscPrintf(comm,
                            "    IJacobian A MatType                : %s\n"
                            "    IJacobian P MatType                : %s\n",
                            amat_type, pmat_type_str));
    }
  }
  if (user->app_ctx->cont_steps) {
    PetscCall(PetscPrintf(comm,
                          "  Continue:\n"
                          "    Filename                           : %s\n"
                          "    Step                               : %" PetscInt_FMT "\n"
                          "    Time                               : %g\n",
                          user->app_ctx->cont_file, user->app_ctx->cont_steps, user->app_ctx->cont_time));
  }
  // Mesh
  const PetscInt num_comp_q = 5;
  PetscInt       glob_dofs, owned_dofs, local_dofs;
  const CeedInt  num_P = user->app_ctx->degree + 1, num_Q = num_P + user->app_ctx->q_extra;
  PetscCall(DMGetGlobalVectorInfo(user->dm, &owned_dofs, &glob_dofs, NULL));
  PetscCall(DMGetLocalVectorInfo(user->dm, &local_dofs, NULL, NULL));
  PetscCall(PetscPrintf(comm,
                        "  Mesh:\n"
                        "    Number of 1D Basis Nodes (P)       : %" CeedInt_FMT "\n"
                        "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
                        "    Global DoFs                        : %" PetscInt_FMT "\n"
                        "    DoFs per node                      : %" PetscInt_FMT "\n"
                        "    Global %" PetscInt_FMT "-DoF nodes                 : %" PetscInt_FMT "\n",
                        num_P, num_Q, glob_dofs, num_comp_q, num_comp_q, glob_dofs / num_comp_q));
  // -- Get Partition Statistics
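  // Each statistic below is gathered to rank 0, sorted, and reported as (min, max, median, max/median) across ranks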
  PetscCall(PetscPrintf(comm, "  Partition:                             (min,max,median,max/median)\n"));
  {
    PetscInt *gather_buffer = NULL;
    PetscInt  part_owned_dofs[3], part_local_dofs[3], part_boundary_dofs[3], part_neighbors[3];
    PetscInt  median_index = comm_size % 2 ? comm_size / 2 : comm_size / 2 - 1;  // lower of the two middle values when comm_size is even
    if (!rank) PetscCall(PetscMalloc1(comm_size, &gather_buffer));

    PetscCallMPI(MPI_Gather(&owned_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_owned_dofs[0]             = gather_buffer[0];              // min
      part_owned_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_owned_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_owned_dof_ratio = (PetscReal)part_owned_dofs[1] / (PetscReal)part_owned_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Global Vector %" PetscInt_FMT "-DoF nodes          : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_owned_dofs[0] / num_comp_q, part_owned_dofs[1] / num_comp_q, part_owned_dofs[2] / num_comp_q, part_owned_dof_ratio));
    }

    PetscCallMPI(MPI_Gather(&local_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_local_dofs[0]             = gather_buffer[0];              // min
      part_local_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_local_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_local_dof_ratio = (PetscReal)part_local_dofs[1] / (PetscReal)part_local_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Local Vector %" PetscInt_FMT "-DoF nodes           : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_local_dofs[0] / num_comp_q, part_local_dofs[1] / num_comp_q, part_local_dofs[2] / num_comp_q, part_local_dof_ratio));
    }

    if (comm_size != 1) {
      PetscInt num_remote_roots_total = 0, num_remote_leaves_total = 0, num_ghost_interface_ranks = 0, num_owned_interface_ranks = 0;
      {
        PetscSF            sf;
        PetscInt           nrranks, niranks;
        const PetscInt    *roffset, *rmine, *rremote, *ioffset, *irootloc;
        const PetscMPIInt *rranks, *iranks;
        PetscCall(DMGetSectionSF(user->dm, &sf));
        PetscCall(PetscSFGetRootRanks(sf, &nrranks, &rranks, &roffset, &rmine, &rremote));
        PetscCall(PetscSFGetLeafRanks(sf, &niranks, &iranks, &ioffset, &irootloc));
        for (PetscInt i = 0; i < nrranks; i++) {
          if (rranks[i] == rank) continue;  // Ignore same-part global->local transfers
          num_remote_roots_total += roffset[i + 1] - roffset[i];
          num_ghost_interface_ranks++;
        }
        for (PetscInt i = 0; i < niranks; i++) {
          if (iranks[i] == rank) continue;
          num_remote_leaves_total += ioffset[i + 1] - ioffset[i];
          num_owned_interface_ranks++;
        }
      }
      PetscCallMPI(MPI_Gather(&num_remote_roots_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Ghost Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
            num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
            part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_ghost_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Ghost Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_remote_leaves_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Owned Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
            num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
            part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_owned_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Owned Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }
    }

    if (!rank) PetscCall(PetscFree(gather_buffer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}