// Copyright (c) 2017-2022, Lawrence Livermore National Security, LLC and other CEED contributors.
// All Rights Reserved. See the top-level LICENSE and NOTICE files for details.
//
// SPDX-License-Identifier: BSD-2-Clause
//
// This file is part of CEED:  http://github.com/ceed

/// @file
/// Miscellaneous utility functions

#include <ceed.h>
#include <petscdm.h>
#include <petscsf.h>
#include <petscts.h>

#include "../navierstokes.h"
#include "../qfunctions/mass.h"

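// Compute initial conditions in Q, dividing by nodal multiplicity so DoFs shared between elements are not counted multiple times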
PetscErrorCode ICs_FixMultiplicity(DM dm, CeedData ceed_data, User user, Vec Q_loc, Vec Q, CeedScalar time) {
  Ceed         ceed = user->ceed;
  CeedVector   mult_vec;
  PetscMemType m_mem_type;
  Vec          Multiplicity, Multiplicity_loc;

  PetscFunctionBeginUser;
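  // -- Evaluate initial conditions via the libCEED operator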
  if (user->phys->ics_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(ceed_data->op_ics_ctx->op, user->phys->ics_time_label, &time));
  PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, ceed_data->op_ics_ctx));

  PetscCallCeed(ceed, CeedElemRestrictionCreateVector(ceed_data->elem_restr_q, &mult_vec, NULL));

  // -- Get multiplicity
  PetscCall(DMGetLocalVector(dm, &Multiplicity_loc));
  PetscCall(VecPetscToCeed(Multiplicity_loc, &m_mem_type, mult_vec));
  PetscCallCeed(ceed, CeedElemRestrictionGetMultiplicity(ceed_data->elem_restr_q, mult_vec));
  PetscCall(VecCeedToPetsc(mult_vec, m_mem_type, Multiplicity_loc));

  PetscCall(DMGetGlobalVector(dm, &Multiplicity));
  PetscCall(VecZeroEntries(Multiplicity));
  PetscCall(DMLocalToGlobal(dm, Multiplicity_loc, ADD_VALUES, Multiplicity));

  // -- Fix multiplicity
  PetscCall(VecPointwiseDivide(Q, Q, Multiplicity));
  PetscCall(VecPointwiseDivide(Q_loc, Q_loc, Multiplicity_loc));

  PetscCall(DMRestoreLocalVector(dm, &Multiplicity_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Multiplicity));
  PetscCallCeed(ceed, CeedVectorDestroy(&mult_vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Record boundary values from initial condition
PetscErrorCode SetBCsFromICs(DM dm, Vec Q, Vec Q_loc) {
  Vec Qbc, boundary_mask;

  PetscFunctionBeginUser;
  PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
  PetscCall(VecCopy(Q_loc, Qbc));
  PetscCall(VecZeroEntries(Q_loc));
  PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
  PetscCall(VecAXPY(Qbc, -1., Q_loc));
  PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
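  // Redirect DMPlexInsertBoundaryValues() to DMPlexInsertBoundaryValues_FromICs() for this DM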
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_FromICs));

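  // Build boundary mask: 1 at unconstrained DoFs, 0 at essential BC DoFs (which are absent from the global vector)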
  PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscCall(DMGetGlobalVector(dm, &Q));  // Q is reused here as scratch; the parameter's value is no longer needed
  PetscCall(VecZeroEntries(boundary_mask));
  PetscCall(VecSet(Q, 1.0));
  PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, boundary_mask));
  PetscCall(DMRestoreGlobalVector(dm, &Q));
  PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscFunctionReturn(PETSC_SUCCESS);
}
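// Insert the boundary values recorded by SetBCsFromICs(); called through DMPlexInsertBoundaryValues()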
PetscErrorCode DMPlexInsertBoundaryValues_FromICs(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
                                                  Vec grad_FVM) {
  Vec Qbc, boundary_mask;

  PetscFunctionBeginUser;
  // Mask (zero) Strong BC entries
  PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
  PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));

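  // Add the stored boundary values back into the masked entries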
  PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
  PetscCall(VecAXPY(Q_loc, 1., Qbc));
  PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/// @brief Load vector from binary file, possibly with embedded solution time and step number
PetscErrorCode LoadFluidsBinaryVec(MPI_Comm comm, PetscViewer viewer, Vec Q, PetscReal *time, PetscInt *step_number) {
  PetscInt   file_step_number;
  PetscInt32 token;
  PetscReal  file_time;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerBinaryRead(viewer, &token, 1, NULL, PETSC_INT32));
  if (token == FLUIDS_FILE_TOKEN_32 || token == FLUIDS_FILE_TOKEN_64 ||
      token == FLUIDS_FILE_TOKEN) {  // New style format; we're reading a file with step number and time in the header
    PetscCall(PetscViewerBinaryRead(viewer, &file_step_number, 1, NULL, PETSC_INT));
    PetscCall(PetscViewerBinaryRead(viewer, &file_time, 1, NULL, PETSC_REAL));
    if (time) *time = file_time;
    if (step_number) *step_number = file_step_number;
  } else if (token == VEC_FILE_CLASSID) {  // Legacy format of just the vector, encoded as [VEC_FILE_CLASSID, length, data]
    PetscInt length, N;
    PetscCall(PetscViewerBinaryRead(viewer, &length, 1, NULL, PETSC_INT));
    PetscCall(VecGetSize(Q, &N));
    PetscCheck(length == N, comm, PETSC_ERR_ARG_INCOMP, "File Vec has length %" PetscInt_FMT " but DM has global Vec size %" PetscInt_FMT, length, N);
    PetscCall(PetscViewerBinarySetSkipHeader(viewer, PETSC_TRUE));
  } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Not a fluids header token or a PETSc Vec in file");

  PetscCall(VecLoad(Q, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Compare reference solution values with current test run for CI
PetscErrorCode RegressionTest(AppCtx app_ctx, Vec Q) {
  Vec         Qref;
  PetscViewer viewer;
  PetscReal   error, Qrefnorm;
  MPI_Comm    comm = PetscObjectComm((PetscObject)Q);

  PetscFunctionBeginUser;
  // Read reference file
  PetscCall(VecDuplicate(Q, &Qref));
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
  PetscCall(LoadFluidsBinaryVec(comm, viewer, Qref, NULL, NULL));

  // Compute error with respect to reference solution
  PetscCall(VecAXPY(Q, -1.0, Qref));
  PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
  PetscCall(VecScale(Q, 1. / Qrefnorm));
  PetscCall(VecNorm(Q, NORM_MAX, &error));

  // Check error
  if (error > app_ctx->test_tol) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
  }

  // Cleanup
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&Qref));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Compute and print the relative error for problems with exact solutions
PetscErrorCode PrintError(CeedData ceed_data, DM dm, User user, Vec Q, PetscScalar final_time) {
  PetscInt  loc_nodes;
  Vec       Q_exact, Q_exact_loc;
  PetscReal rel_error, norm_error, norm_exact;

  PetscFunctionBeginUser;
  // Get exact solution at final time
  PetscCall(DMGetGlobalVector(dm, &Q_exact));
  PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
  PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
  PetscCall(ICs_FixMultiplicity(dm, ceed_data, user, Q_exact_loc, Q_exact, final_time));

  // Get |exact solution - obtained solution|
  PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
  PetscCall(VecAXPY(Q, -1.0, Q_exact));
  PetscCall(VecNorm(Q, NORM_1, &norm_error));

  rel_error = norm_error / norm_exact;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
  PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Q_exact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Post-processing
PetscErrorCode PostProcess(TS ts, CeedData ceed_data, DM dm, ProblemData *problem, User user, Vec Q, PetscScalar final_time) {
  PetscInt          steps;
  TSConvergedReason reason;

  PetscFunctionBeginUser;
  // Print relative error
  if (problem->non_zero_time && user->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PrintError(ceed_data, dm, user, Q, final_time));
  }

  // Print final time and number of steps
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  if (user->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
                          steps, (double)final_time));
  }

  // Output numerical values from command line
  PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));

  // Compare reference solution values with current test run for CI
  if (user->app_ctx->test_type == TESTTYPE_SOLVER) {
    PetscCall(RegressionTest(user->app_ctx, Q));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

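// Header tokens identifying the fluids binary restart format; see LoadFluidsBinaryVec()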
const PetscInt32 FLUIDS_FILE_TOKEN    = 0xceedf00;  // for backwards compatibility
const PetscInt32 FLUIDS_FILE_TOKEN_32 = 0xceedf32;
const PetscInt32 FLUIDS_FILE_TOKEN_64 = 0xceedf64;

// Read initial Q values from a binary file when continuing a simulation
PetscErrorCode SetupICsFromBinary(MPI_Comm comm, AppCtx app_ctx, Vec Q) {
  PetscViewer viewer;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->cont_file, FILE_MODE_READ, &viewer));
  PetscCall(LoadFluidsBinaryVec(comm, viewer, Q, &app_ctx->cont_time, &app_ctx->cont_steps));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Free a plain data context that was allocated using PETSc; returns libCEED error codes
int FreeContextPetsc(void *data) {
  if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
  return CEED_ERROR_SUCCESS;
}

// Create the mass operator QFunction for N components
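// A typical (illustrative) use pairs the result with quadrature data and a mass CeedOperator, e.g.:
//   CeedQFunction qf_mass;
//   PetscCall(CreateMassQFunction(ceed, 5, q_data_size, &qf_mass));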
PetscErrorCode CreateMassQFunction(Ceed ceed, CeedInt N, CeedInt q_data_size, CeedQFunction *qf) {
  PetscFunctionBeginUser;
  switch (N) {
    case 1:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_1, Mass_1_loc, qf));
      break;
    case 5:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_5, Mass_5_loc, qf));
      break;
    case 7:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_7, Mass_7_loc, qf));
      break;
    case 9:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_9, Mass_9_loc, qf));
      break;
    case 22:
      PetscCallCeed(ceed, CeedQFunctionCreateInterior(ceed, 1, Mass_22, Mass_22_loc, qf));
      break;
    default:
      SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Could not find mass qfunction of size %" CeedInt_FMT, N);
  }

  PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "u", N, CEED_EVAL_INTERP));
  PetscCallCeed(ceed, CeedQFunctionAddInput(*qf, "qdata", q_data_size, CEED_EVAL_NONE));
  PetscCallCeed(ceed, CeedQFunctionAddOutput(*qf, "v", N, CEED_EVAL_INTERP));
  PetscCallCeed(ceed, CeedQFunctionSetUserFlopsEstimate(*qf, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

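// Destroy a NodalProjectionData context, including its DM, KSP, and RHS operator apply context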
PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData context) {
  PetscFunctionBeginUser;
  if (context == NULL) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(DMDestroy(&context->dm));
  PetscCall(KSPDestroy(&context->ksp));

  PetscCall(OperatorApplyContextDestroy(context->l2_rhs_ctx));

  PetscCall(PetscFree(context));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
 * @brief Open a PHASTA *.dat file, grabbing dimensions and file pointer
 *
 * This function opens the file specified by `path` using `PetscFOpen` and passes the file pointer in `fp`.
 * The file is not closed in this function, so `fp` must be closed sometime after this function has been called (using `PetscFClose`, for example).
 *
 * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
 *
 * @param[in]  comm           MPI_Comm for the program
 * @param[in]  path           Path to the file
 * @param[in]  char_array_len Length of the character array that should contain each line
 * @param[out] dims           Dimensions of the file, taken from the first line of the file
 * @param[out] fp             File pointer to the opened file
 */
PetscErrorCode PhastaDatFileOpen(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], const PetscInt char_array_len, PetscInt dims[2],
                                 FILE **fp) {
  int    ndims;
  char   line[char_array_len];
  char **array;

  PetscFunctionBeginUser;
  PetscCall(PetscFOpen(comm, path, "r", fp));
  PetscCall(PetscSynchronizedFGets(comm, *fp, char_array_len, line));
  PetscCall(PetscStrToArray(line, ' ', &ndims, &array));
  PetscCheck(ndims == 2, comm, PETSC_ERR_FILE_UNEXPECTED, "Found %d dimensions instead of 2 on the first line of %s", ndims, path);

  for (PetscInt i = 0; i < ndims; i++) dims[i] = atoi(array[i]);
  PetscCall(PetscStrToArrayDestroy(ndims, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
 * @brief Get the number of rows for the PHASTA file at path.
 *
 * Assumes that the first line of the file has the number of rows and columns as the only two entries, separated by a single space.
 *
 * @param[in]  comm  MPI_Comm for the program
 * @param[in]  path  Path to the file
 * @param[out] nrows Number of rows
 */
PetscErrorCode PhastaDatFileGetNRows(const MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscInt *nrows) {
  const PetscInt char_array_len = 512;
  PetscInt       dims[2];
  FILE          *fp;

  PetscFunctionBeginUser;
  PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));
  *nrows = dims[0];
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

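// Read the contents of a PHASTA *.dat file at path into a dense, row-major array of PetscReal (sized per the file's header dimensions)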
PetscErrorCode PhastaDatFileReadToArrayReal(MPI_Comm comm, const char path[PETSC_MAX_PATH_LEN], PetscReal array[]) {
  PetscInt       dims[2];
  int            ndims;
  FILE          *fp;
  const PetscInt char_array_len = 512;
  char           line[char_array_len];
  char         **row_array;

  PetscFunctionBeginUser;
  PetscCall(PhastaDatFileOpen(comm, path, char_array_len, dims, &fp));

  for (PetscInt i = 0; i < dims[0]; i++) {
    PetscCall(PetscSynchronizedFGets(comm, fp, char_array_len, line));
    PetscCall(PetscStrToArray(line, ' ', &ndims, &row_array));
    PetscCheck(ndims == dims[1], comm, PETSC_ERR_FILE_UNEXPECTED,
               "Line %" PetscInt_FMT " of %s does not contain the expected number of columns (%d instead of %" PetscInt_FMT ")", i, path, ndims, dims[1]);

    for (PetscInt j = 0; j < dims[1]; j++) {
      array[i * dims[1] + j] = (PetscReal)atof(row_array[j]);
    }
    PetscCall(PetscStrToArrayDestroy(ndims, row_array));
  }

  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

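// Log events used to profile libCEED operator work, online training (SmartRedis), and miscellaneous stages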
PetscLogEvent       FLUIDS_CeedOperatorApply;
PetscLogEvent       FLUIDS_CeedOperatorAssemble;
PetscLogEvent       FLUIDS_CeedOperatorAssembleDiagonal;
PetscLogEvent       FLUIDS_CeedOperatorAssemblePointBlockDiagonal;
PetscLogEvent       FLUIDS_SmartRedis_Init;
PetscLogEvent       FLUIDS_SmartRedis_Meta;
PetscLogEvent       FLUIDS_SmartRedis_Train;
PetscLogEvent       FLUIDS_TrainDataCompute;
PetscLogEvent       FLUIDS_DifferentialFilter;
PetscLogEvent       FLUIDS_VelocityGradientProjection;
static PetscClassId libCEED_classid, onlineTrain_classid, misc_classid;

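// Register the log events and class IDs declared above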
PetscErrorCode RegisterLogEvents(void) {
  PetscFunctionBeginUser;
  PetscCall(PetscClassIdRegister("libCEED", &libCEED_classid));
  PetscCall(PetscLogEventRegister("CeedOpApply", libCEED_classid, &FLUIDS_CeedOperatorApply));
  PetscCall(PetscLogEventRegister("CeedOpAsm", libCEED_classid, &FLUIDS_CeedOperatorAssemble));
  PetscCall(PetscLogEventRegister("CeedOpAsmD", libCEED_classid, &FLUIDS_CeedOperatorAssembleDiagonal));
  PetscCall(PetscLogEventRegister("CeedOpAsmPBD", libCEED_classid, &FLUIDS_CeedOperatorAssemblePointBlockDiagonal));

  PetscCall(PetscClassIdRegister("onlineTrain", &onlineTrain_classid));
  PetscCall(PetscLogEventRegister("SmartRedis_Init", onlineTrain_classid, &FLUIDS_SmartRedis_Init));
  PetscCall(PetscLogEventRegister("SmartRedis_Meta", onlineTrain_classid, &FLUIDS_SmartRedis_Meta));
  PetscCall(PetscLogEventRegister("SmartRedis_Train", onlineTrain_classid, &FLUIDS_SmartRedis_Train));
  PetscCall(PetscLogEventRegister("TrainDataCompute", onlineTrain_classid, &FLUIDS_TrainDataCompute));

  PetscCall(PetscClassIdRegister("Miscellaneous", &misc_classid));
  PetscCall(PetscLogEventRegister("DiffFilter", misc_classid, &FLUIDS_DifferentialFilter));
  PetscCall(PetscLogEventRegister("VeloGradProj", misc_classid, &FLUIDS_VelocityGradientProjection));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Print information about the given simulation run
PetscErrorCode PrintRunInfo(User user, Physics phys_ctx, ProblemData *problem, MPI_Comm comm) {
  Ceed ceed = user->ceed;
  PetscFunctionBeginUser;
  // Header and rank
  char        host_name[PETSC_MAX_PATH_LEN];
  PetscMPIInt rank, comm_size;
  PetscCall(PetscGetHostName(host_name, sizeof host_name));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &comm_size));
  PetscCall(PetscPrintf(comm,
                        "\n-- Navier-Stokes solver - libCEED + PETSc --\n"
                        "  MPI:\n"
                        "    Host Name                          : %s\n"
                        "    Total ranks                        : %d\n",
                        host_name, comm_size));

  // Problem specific info
  PetscCall(problem->print_info(user, problem, user->app_ctx));

  // libCEED
  const char *used_resource;
  CeedMemType mem_type_backend;
  PetscCallCeed(ceed, CeedGetResource(user->ceed, &used_resource));
  PetscCallCeed(ceed, CeedGetPreferredMemType(user->ceed, &mem_type_backend));
  PetscCall(PetscPrintf(comm,
                        "  libCEED:\n"
                        "    libCEED Backend                    : %s\n"
                        "    libCEED Backend MemType            : %s\n",
                        used_resource, CeedMemTypes[mem_type_backend]));
  // PETSc
  char box_faces_str[PETSC_MAX_PATH_LEN] = "3,3,3";
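  // Truncate the default "3,3,3" to "3,3" for 2D problems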
  if (problem->dim == 2) box_faces_str[3] = '\0';
  PetscCall(PetscOptionsGetString(NULL, NULL, "-dm_plex_box_faces", box_faces_str, sizeof(box_faces_str), NULL));
  MatType amat_type = user->app_ctx->amat_type, pmat_type;
  VecType vec_type;
  PetscCall(DMGetMatType(user->dm, &pmat_type));
  if (!amat_type) amat_type = pmat_type;
  PetscCall(DMGetVecType(user->dm, &vec_type));
  PetscCall(PetscPrintf(comm,
                        "  PETSc:\n"
                        "    Box Faces                          : %s\n"
                        "    A MatType                          : %s\n"
                        "    P MatType                          : %s\n"
                        "    DM VecType                         : %s\n"
                        "    Time Stepping Scheme               : %s\n",
                        box_faces_str, amat_type, pmat_type, vec_type, phys_ctx->implicit ? "implicit" : "explicit"));
  if (user->app_ctx->cont_steps) {
    PetscCall(PetscPrintf(comm,
                          "  Continue:\n"
                          "    Filename                           : %s\n"
                          "    Step                               : %" PetscInt_FMT "\n"
                          "    Time                               : %g\n",
                          user->app_ctx->cont_file, user->app_ctx->cont_steps, user->app_ctx->cont_time));
  }
  // Mesh
  const PetscInt num_comp_q = 5;
  PetscInt       glob_dofs, owned_dofs, local_dofs;
  const CeedInt  num_P = user->app_ctx->degree + 1, num_Q = num_P + user->app_ctx->q_extra;
  PetscCall(DMGetGlobalVectorInfo(user->dm, &owned_dofs, &glob_dofs, NULL));
  PetscCall(DMGetLocalVectorInfo(user->dm, &local_dofs, NULL, NULL));
  PetscCall(PetscPrintf(comm,
                        "  Mesh:\n"
                        "    Number of 1D Basis Nodes (P)       : %" CeedInt_FMT "\n"
                        "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
                        "    Global DoFs                        : %" PetscInt_FMT "\n"
                        "    DoFs per node                      : %" PetscInt_FMT "\n"
                        "    Global %" PetscInt_FMT "-DoF nodes                 : %" PetscInt_FMT "\n",
                        num_P, num_Q, glob_dofs, num_comp_q, num_comp_q, glob_dofs / num_comp_q));
  // -- Get Partition Statistics
  PetscCall(PetscPrintf(comm, "  Partition:                             (min,max,median,max/median)\n"));
  {
    PetscInt *gather_buffer = NULL;
    PetscInt  part_owned_dofs[3], part_local_dofs[3], part_boundary_dofs[3], part_neighbors[3];
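    // Index of the median entry once the gathered per-rank values are sorted (lower median for even comm_size)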
    PetscInt  median_index = comm_size % 2 ? comm_size / 2 : comm_size / 2 - 1;
    if (!rank) PetscCall(PetscMalloc1(comm_size, &gather_buffer));

    PetscCallMPI(MPI_Gather(&owned_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_owned_dofs[0]             = gather_buffer[0];              // min
      part_owned_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_owned_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_owned_dof_ratio = (PetscReal)part_owned_dofs[1] / (PetscReal)part_owned_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Global Vector %" PetscInt_FMT "-DoF nodes          : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_owned_dofs[0] / num_comp_q, part_owned_dofs[1] / num_comp_q, part_owned_dofs[2] / num_comp_q, part_owned_dof_ratio));
    }

    PetscCallMPI(MPI_Gather(&local_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_local_dofs[0]             = gather_buffer[0];              // min
      part_local_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_local_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_local_dof_ratio = (PetscReal)part_local_dofs[1] / (PetscReal)part_local_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Local Vector %" PetscInt_FMT "-DoF nodes           : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_local_dofs[0] / num_comp_q, part_local_dofs[1] / num_comp_q, part_local_dofs[2] / num_comp_q, part_local_dof_ratio));
    }

    if (comm_size != 1) {
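      // Count interface DoFs and neighboring ranks from the DM's section star forest (SF)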
      PetscInt num_remote_roots_total = 0, num_remote_leaves_total = 0, num_ghost_interface_ranks = 0, num_owned_interface_ranks = 0;
      {
        PetscSF            sf;
        PetscInt           nrranks, niranks;
        const PetscInt    *roffset, *rmine, *rremote, *ioffset, *irootloc;
        const PetscMPIInt *rranks, *iranks;
        PetscCall(DMGetSectionSF(user->dm, &sf));
        PetscCall(PetscSFGetRootRanks(sf, &nrranks, &rranks, &roffset, &rmine, &rremote));
        PetscCall(PetscSFGetLeafRanks(sf, &niranks, &iranks, &ioffset, &irootloc));
        for (PetscInt i = 0; i < nrranks; i++) {
          if (rranks[i] == rank) continue;  // Ignore same-part global->local transfers
          num_remote_roots_total += roffset[i + 1] - roffset[i];
          num_ghost_interface_ranks++;
        }
        for (PetscInt i = 0; i < niranks; i++) {
          if (iranks[i] == rank) continue;
          num_remote_leaves_total += ioffset[i + 1] - ioffset[i];
          num_owned_interface_ranks++;
        }
      }
      PetscCallMPI(MPI_Gather(&num_remote_roots_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Ghost Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
            num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
            part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_ghost_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Ghost Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_remote_leaves_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Owned Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
            num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
            part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_owned_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Owned Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }
    }

    if (!rank) PetscCall(PetscFree(gather_buffer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
548