// SPDX-FileCopyrightText: Copyright (c) 2017-2024, HONEE contributors.
// SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause

/// @file
/// Miscellaneous utility functions

#include <ceed.h>
#include <petscdm.h>
#include <petscsf.h>
#include <petscts.h>

#include <navierstokes.h>

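/**
  @brief Set initial conditions, dividing by nodal multiplicity so values summed at nodes shared between elements are correct

  @param[in]  dm     `DM` for the solution vectors
  @param[in]  honee  `Honee` context providing the ICs operator
  @param[out] Q_loc  Local vector for the initial conditions
  @param[out] Q      Global vector for the initial conditions
  @param[in]  time   Time at which to evaluate the initial conditions
**/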
PetscErrorCode ICs_FixMultiplicity(DM dm, Honee honee, Vec Q_loc, Vec Q, CeedScalar time) {
  Ceed                ceed = honee->ceed;
  CeedVector          mult_vec;
  PetscMemType        m_mem_type;
  Vec                 Multiplicity, Multiplicity_loc;
  CeedElemRestriction elem_restr_q;

  PetscFunctionBeginUser;
  if (honee->phys->ics_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(honee->op_ics_ctx->op, honee->phys->ics_time_label, &time));
  PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, honee->op_ics_ctx));
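  // Local-to-global assembly sums element contributions, so entries at shared nodes are scaled by their multiplicity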

  // -- Get multiplicity
  PetscCall(DMPlexCeedElemRestrictionCreate(ceed, honee->dm, DMLABEL_DEFAULT, DMLABEL_DEFAULT_VALUE, 0, 0, &elem_restr_q));
  PetscCallCeed(ceed, CeedElemRestrictionCreateVector(elem_restr_q, &mult_vec, NULL));
  PetscCall(DMGetLocalVector(dm, &Multiplicity_loc));
  PetscCall(VecPetscToCeed(Multiplicity_loc, &m_mem_type, mult_vec));
  PetscCallCeed(ceed, CeedElemRestrictionGetMultiplicity(elem_restr_q, mult_vec));
  PetscCall(VecCeedToPetsc(mult_vec, m_mem_type, Multiplicity_loc));

  PetscCall(DMGetGlobalVector(dm, &Multiplicity));
  PetscCall(VecZeroEntries(Multiplicity));
  PetscCall(DMLocalToGlobal(dm, Multiplicity_loc, ADD_VALUES, Multiplicity));

  // -- Fix multiplicity
  PetscCall(VecPointwiseDivide(Q, Q, Multiplicity));
  PetscCall(VecPointwiseDivide(Q_loc, Q_loc, Multiplicity_loc));

  PetscCall(DMRestoreLocalVector(dm, &Multiplicity_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Multiplicity));
  PetscCallCeed(ceed, CeedVectorDestroy(&mult_vec));
  PetscCallCeed(ceed, CeedElemRestrictionDestroy(&elem_restr_q));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Record boundary values from initial condition
PetscErrorCode SetBCsFromICs(DM dm, Vec Q, Vec Q_loc) {
  PetscFunctionBeginUser;
  {  // Capture initial condition values in Qbc
    Vec Qbc;

    PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
    PetscCall(VecCopy(Q_loc, Qbc));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
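    // Essential BC dofs are absent from the global vector, so Q_loc is zero there; subtracting leaves only the boundary values in Qbc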
    PetscCall(VecAXPY(Qbc, -1., Q_loc));
    PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_FromICs));

  {  // Set boundary mask to zero out essential BCs
    Vec boundary_mask, ones;

    PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMGetGlobalVector(dm, &ones));
    PetscCall(VecZeroEntries(boundary_mask));
    PetscCall(VecSet(ones, 1.0));
    PetscCall(DMGlobalToLocal(dm, ones, INSERT_VALUES, boundary_mask));
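    // The mask stays zero at essential BC dofs (which receive no global values) and is one everywhere else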
    PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMRestoreGlobalVector(dm, &ones));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

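// Insert boundary values captured by SetBCsFromICs(); composed with the DM as "DMPlexInsertBoundaryValues_C"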
PetscErrorCode DMPlexInsertBoundaryValues_FromICs(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
                                                  Vec grad_FVM) {
  Vec Qbc, boundary_mask;

  PetscFunctionBeginUser;
  // Mask (zero) Strong BC entries
  PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
  PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));

  PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
  PetscCall(VecAXPY(Q_loc, 1., Qbc));
  PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Compare reference solution values with current test run for CI
PetscErrorCode RegressionTest(AppCtx app_ctx, Vec Q) {
  Vec         Qref;
  PetscViewer viewer;
  PetscReal   error, Qrefnorm;
  MPI_Comm    comm = PetscObjectComm((PetscObject)Q);

  PetscFunctionBeginUser;
  // Read reference file
  PetscCall(VecDuplicate(Q, &Qref));
  PetscCheck(strcmp(app_ctx->test_file_path, "") != 0, comm, PETSC_ERR_FILE_READ, "File for regression test not given");
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
  PetscCall(HoneeLoadBinaryVec(viewer, Qref, NULL, NULL));

  // Compute error with respect to reference solution
  PetscCall(VecAXPY(Q, -1.0, Qref));
  PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
  PetscCall(VecScale(Q, 1. / Qrefnorm));
  PetscCall(VecNorm(Q, NORM_MAX, &error));

  // Check error
  if (error > app_ctx->test_tol) {
    PetscCall(PetscPrintf(comm, "Test failed with error norm %g\n", (double)error));
  }

  // Cleanup
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&Qref));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Get error for problems with exact solutions
PetscErrorCode PrintError(DM dm, Honee honee, Vec Q, PetscScalar final_time) {
  Vec       Q_exact, Q_exact_loc;
  PetscReal rel_error, norm_error, norm_exact;

  PetscFunctionBeginUser;
  // Get exact solution at final time
  PetscCall(DMGetGlobalVector(dm, &Q_exact));
  PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
  PetscCall(ICs_FixMultiplicity(dm, honee, Q_exact_loc, Q_exact, final_time));

  // Get |exact solution - obtained solution|
  PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
  PetscCall(VecAXPY(Q, -1.0, Q_exact));
  PetscCall(VecNorm(Q, NORM_1, &norm_error));

  rel_error = norm_error / norm_exact;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
  PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Q_exact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Post-processing: report solution error, print time-stepping info, and run regression tests
PetscErrorCode PostProcess(TS ts, DM dm, ProblemData problem, Honee honee, Vec Q, PetscScalar final_time) {
  PetscInt          steps;
  TSConvergedReason reason;

  PetscFunctionBeginUser;
  // Print relative error
  if (problem->compute_exact_solution_error && honee->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PrintError(dm, honee, Q, final_time));
  }

  // Print final time and number of steps
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  if (honee->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
                          steps, (double)final_time));
  }

  // Output numerical values from command line
  PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));

  // Compare reference solution values with current test run for CI
  if (honee->app_ctx->test_type == TESTTYPE_SOLVER) {
    PetscCall(RegressionTest(honee->app_ctx, Q));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Free a plain data context allocated with PETSc; returns libCEED error codes
int FreeContextPetsc(void *data) {
  if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
  return CEED_ERROR_SUCCESS;
}

/**
  @brief Destroy `NodalProjectionData` object

  @param[in] context `NodalProjectionData` object to destroy
**/
PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData *context) {
  NodalProjectionData context_ = *context;

  PetscFunctionBeginUser;
  if (context_ == NULL) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMDestroy(&context_->dm));
  PetscCall(KSPDestroy(&context_->ksp));
  PetscCall(OperatorApplyContextDestroy(context_->l2_rhs_ctx));
  PetscCall(PetscFree(context_));
  *context = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
  @brief Set the value of an option if it is not already set

  @param[in,out] options `PetscOptions` database, or `NULL` for the default global database
  @param[in]     name    Name of the option (with `-` prepended to it)
  @param[in]     value   Value to set the option to
**/
PetscErrorCode HoneeOptionsSetValueDefault(PetscOptions options, const char name[], const char value[]) {
  PetscBool has_option;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsHasName(options, NULL, name, &has_option));
  if (!has_option) PetscCall(PetscOptionsSetValue(options, name, value));
  PetscFunctionReturn(PETSC_SUCCESS);
}

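/**
  @brief Calculate the size (volume) of the domain by integrating 1 over it with a mass operator built on the coordinate space

  @param[in]  honee   `Honee` context
  @param[out] volume  Computed domain volume
**/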
PetscErrorCode HoneeCalculateDomainSize(Honee honee, PetscScalar *volume) {
  DM                   dm   = honee->dm, dm_coord;
  Ceed                 ceed = honee->ceed;
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext op_mass_ctx;
  CeedElemRestriction  elem_restr_qd;
  CeedVector           qdata;
  CeedInt              q_data_size, num_comps_x;
  Vec                  u, v;

  PetscFunctionBeginUser;
  PetscCall(DMGetCoordinateDM(dm, &dm_coord));
  PetscCall(QDataGet(ceed, dm, DMLABEL_DEFAULT, DMLABEL_DEFAULT_VALUE, honee->elem_restr_x, honee->basis_x, honee->x_coord, &elem_restr_qd, &qdata,
                     &q_data_size));
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(honee->elem_restr_x, &num_comps_x));

  PetscCall(HoneeMassQFunctionCreate(ceed, num_comps_x, q_data_size, &qf_mass));
  PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", honee->elem_restr_x, honee->basis_x, CEED_VECTOR_ACTIVE));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", elem_restr_qd, CEED_BASIS_NONE, qdata));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", honee->elem_restr_x, honee->basis_x, CEED_VECTOR_ACTIVE));

  PetscCall(OperatorApplyContextCreate(NULL, dm_coord, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));
  PetscCall(CeedOperatorCreateLocalVecs(op_mass, DMReturnVecType(dm), PETSC_COMM_SELF, &u, NULL));
  PetscCall(DMCreateGlobalVector(dm_coord, &v));
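  // Integrate 1 over the domain by applying the mass operator to a vector of ones and summing the result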
  PetscCall(VecSet(u, 1.));
  PetscCall(ApplyCeedOperatorLocalToGlobal(u, v, op_mass_ctx));
  PetscCall(VecSum(v, volume));
  *volume /= num_comps_x;  // Correct for number of components != 1

  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&v));
  PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
  PetscCallCeed(ceed, CeedElemRestrictionDestroy(&elem_restr_qd));
  PetscCallCeed(ceed, CeedVectorDestroy(&qdata));
  PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
  PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Print information about the given simulation run
PetscErrorCode PrintRunInfo(Honee honee, Physics phys_ctx, ProblemData problem, TS ts) {
  Ceed     ceed = honee->ceed;
  MPI_Comm comm = PetscObjectComm((PetscObject)ts);

  PetscFunctionBeginUser;
  // Header and rank
  char        host_name[PETSC_MAX_PATH_LEN];
  PetscMPIInt rank, comm_size;
  PetscCall(PetscGetHostName(host_name, sizeof host_name));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &comm_size));
  PetscCall(PetscPrintf(comm,
                        "\n-- HONEE - High-Order Navier-Stokes Equation Evaluator --\n"
                        "  MPI:\n"
                        "    Host Name                          : %s\n"
                        "    Total ranks                        : %d\n",
                        host_name, comm_size));

  // Problem specific info
  PetscCall(problem->print_info(honee, problem, honee->app_ctx));

  // libCEED
  const char *used_resource;
  CeedMemType mem_type_backend;
  PetscCallCeed(ceed, CeedGetResource(honee->ceed, &used_resource));
  PetscCallCeed(ceed, CeedGetPreferredMemType(honee->ceed, &mem_type_backend));
  PetscCall(PetscPrintf(comm,
                        "  libCEED:\n"
                        "    libCEED Backend                    : %s\n"
                        "    libCEED Backend MemType            : %s\n",
                        used_resource, CeedMemTypes[mem_type_backend]));
  // PETSc
  {
    VecType  vec_type;
    char     box_faces_str[PETSC_MAX_PATH_LEN] = "3,3,3";
    PetscInt dim;

    PetscCall(DMGetDimension(honee->dm, &dim));
    if (dim == 2) box_faces_str[3] = '\0';
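    // Truncate the "3,3,3" default to "3,3" for 2D meshes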
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dm_plex_box_faces", box_faces_str, sizeof(box_faces_str), NULL));
    PetscCall(DMGetVecType(honee->dm, &vec_type));
    PetscCall(PetscPrintf(comm,
                          "  PETSc:\n"
                          "    Box Faces                          : %s\n"
                          "    DM VecType                         : %s\n"
                          "    Time Stepping Scheme               : %s\n",
                          box_faces_str, vec_type, phys_ctx->implicit ? "implicit" : "explicit"));
  }
  {
    char           pmat_type_str[PETSC_MAX_PATH_LEN];
    MatType        amat_type, pmat_type;
    Mat            Amat, Pmat;
    TSIJacobianFn *ijacob_function;

    PetscCall(TSGetIJacobian(ts, &Amat, &Pmat, &ijacob_function, NULL));
    PetscCall(MatGetType(Amat, &amat_type));
    PetscCall(MatGetType(Pmat, &pmat_type));

    PetscCall(PetscStrncpy(pmat_type_str, pmat_type, sizeof(pmat_type_str)));
    if (!strcmp(pmat_type, MATCEED)) {
      MatType pmat_coo_type;
      char    pmat_coo_type_str[PETSC_MAX_PATH_LEN];

      PetscCall(MatCeedGetCOOMatType(Pmat, &pmat_coo_type));
      PetscCall(PetscSNPrintf(pmat_coo_type_str, sizeof(pmat_coo_type_str), " (COO MatType: %s)", pmat_coo_type));
      PetscCall(PetscStrlcat(pmat_type_str, pmat_coo_type_str, sizeof(pmat_type_str)));
    }
    if (ijacob_function) {
      PetscCall(PetscPrintf(comm,
                            "    IJacobian A MatType                : %s\n"
                            "    IJacobian P MatType                : %s\n",
                            amat_type, pmat_type_str));
    }
  }
  if (honee->app_ctx->use_continue_file) {
    PetscCall(PetscPrintf(comm,
                          "  Continue:\n"
                          "    Filename                           : %s\n"
                          "    Step                               : %" PetscInt_FMT "\n"
                          "    Time                               : %g\n",
                          honee->app_ctx->cont_file, honee->app_ctx->cont_steps, (double)honee->app_ctx->cont_time));
  }
  // Mesh
  const PetscInt num_comp_q = 5;
  PetscInt       glob_dofs, owned_dofs, local_dofs;
  const CeedInt  num_P = honee->app_ctx->degree + 1, num_Q = num_P + honee->app_ctx->q_extra;
  PetscCall(DMGetGlobalVectorInfo(honee->dm, &owned_dofs, &glob_dofs, NULL));
  PetscCall(DMGetLocalVectorInfo(honee->dm, &local_dofs, NULL, NULL));
  PetscCall(PetscPrintf(comm,
                        "  Mesh:\n"
                        "    Number of 1D Basis Nodes (P)       : %" CeedInt_FMT "\n"
                        "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
                        "    Global DoFs                        : %" PetscInt_FMT "\n"
                        "    DoFs per node                      : %" PetscInt_FMT "\n"
                        "    Global %" PetscInt_FMT "-DoF nodes                 : %" PetscInt_FMT "\n",
                        num_P, num_Q, glob_dofs, num_comp_q, num_comp_q, glob_dofs / num_comp_q));
  // -- Get Partition Statistics
  PetscCall(PetscPrintf(comm, "  Partition:                             (min,max,median,max/median)\n"));
  {
    PetscInt *gather_buffer = NULL;
    PetscInt  part_owned_dofs[3], part_local_dofs[3], part_boundary_dofs[3], part_neighbors[3];
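    // Median index (lower median when comm_size is even)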
    PetscInt  median_index = comm_size % 2 ? comm_size / 2 : comm_size / 2 - 1;
    if (!rank) PetscCall(PetscMalloc1(comm_size, &gather_buffer));

    PetscCallMPI(MPI_Gather(&owned_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_owned_dofs[0]             = gather_buffer[0];              // min
      part_owned_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_owned_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_owned_dof_ratio = (PetscReal)part_owned_dofs[1] / (PetscReal)part_owned_dofs[2];
      PetscCall(PetscPrintf(comm,
                            "    Global Vector %" PetscInt_FMT "-DoF nodes          : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                            num_comp_q, part_owned_dofs[0] / num_comp_q, part_owned_dofs[1] / num_comp_q, part_owned_dofs[2] / num_comp_q,
                            part_owned_dof_ratio));
    }

    PetscCallMPI(MPI_Gather(&local_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_local_dofs[0]             = gather_buffer[0];              // min
      part_local_dofs[1]             = gather_buffer[comm_size - 1];  // max
      part_local_dofs[2]             = gather_buffer[median_index];   // median
      PetscReal part_local_dof_ratio = (PetscReal)part_local_dofs[1] / (PetscReal)part_local_dofs[2];
      PetscCall(PetscPrintf(comm,
                            "    Local Vector %" PetscInt_FMT "-DoF nodes           : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                            num_comp_q, part_local_dofs[0] / num_comp_q, part_local_dofs[1] / num_comp_q, part_local_dofs[2] / num_comp_q,
                            part_local_dof_ratio));
    }

    if (comm_size != 1) {
      PetscInt num_remote_roots_total = 0, num_remote_leaves_total = 0, num_ghost_interface_ranks = 0, num_owned_interface_ranks = 0;
      {
        PetscSF            sf;
        PetscMPIInt        nrranks, niranks;
        const PetscInt    *roffset, *rmine, *rremote, *ioffset, *irootloc;
        const PetscMPIInt *rranks, *iranks;
        PetscCall(DMGetSectionSF(honee->dm, &sf));
        PetscCall(PetscSFGetRootRanks(sf, &nrranks, &rranks, &roffset, &rmine, &rremote));
        PetscCall(PetscSFGetLeafRanks(sf, &niranks, &iranks, &ioffset, &irootloc));
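        // Roots owned by other ranks are this rank's ghost dofs; leaves on other ranks reference this rank's owned interface dofs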
        for (PetscInt i = 0; i < nrranks; i++) {
          if (rranks[i] == rank) continue;  // Ignore same-part global->local transfers
          num_remote_roots_total += roffset[i + 1] - roffset[i];
          num_ghost_interface_ranks++;
        }
        for (PetscInt i = 0; i < niranks; i++) {
          if (iranks[i] == rank) continue;
          num_remote_leaves_total += ioffset[i + 1] - ioffset[i];
          num_owned_interface_ranks++;
        }
      }
      PetscCallMPI(MPI_Gather(&num_remote_roots_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(comm,
                              "    Ghost Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT
                              ", %f\n",
                              num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
                              part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_ghost_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Ghost Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_remote_leaves_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0]           = gather_buffer[0];              // min
        part_boundary_dofs[1]           = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2]           = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(comm,
                              "    Owned Interface %" PetscInt_FMT "-DoF nodes        : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT
                              ", %f\n",
                              num_comp_q, part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q,
                              part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_owned_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0]              = gather_buffer[0];              // min
        part_neighbors[1]              = gather_buffer[comm_size - 1];  // max
        part_neighbors[2]              = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Owned Interface Ranks              : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n",
                              part_neighbors[0], part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }
    }

    if (!rank) PetscCall(PetscFree(gather_buffer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}