// SPDX-FileCopyrightText: Copyright (c) 2017-2024, HONEE contributors.
// SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause

/// @file
/// Miscellaneous utility functions

#include <ceed.h>
#include <petscdm.h>
#include <petscsf.h>
#include <petscts.h>

#include <navierstokes.h>

// Apply the initial-condition operator and correct for node multiplicity (contributions to shared nodes are averaged)
PetscErrorCode ICs_FixMultiplicity(DM dm, Honee honee, Vec Q_loc, Vec Q, CeedScalar time) {
  Ceed         ceed = honee->ceed;
  CeedVector   mult_vec;
  PetscMemType m_mem_type;
  Vec          Multiplicity, Multiplicity_loc;

  PetscFunctionBeginUser;
  if (honee->phys->ics_time_label) PetscCallCeed(ceed, CeedOperatorSetContextDouble(honee->op_ics_ctx->op, honee->phys->ics_time_label, &time));
  PetscCall(ApplyCeedOperatorLocalToGlobal(NULL, Q, honee->op_ics_ctx));

  PetscCallCeed(ceed, CeedElemRestrictionCreateVector(honee->elem_restr_q, &mult_vec, NULL));

  // -- Get multiplicity
  PetscCall(DMGetLocalVector(dm, &Multiplicity_loc));
  PetscCall(VecPetscToCeed(Multiplicity_loc, &m_mem_type, mult_vec));
  PetscCallCeed(ceed, CeedElemRestrictionGetMultiplicity(honee->elem_restr_q, mult_vec));
  PetscCall(VecCeedToPetsc(mult_vec, m_mem_type, Multiplicity_loc));

  PetscCall(DMGetGlobalVector(dm, &Multiplicity));
  PetscCall(VecZeroEntries(Multiplicity));
  PetscCall(DMLocalToGlobal(dm, Multiplicity_loc, ADD_VALUES, Multiplicity));

  // -- Fix multiplicity
  PetscCall(VecPointwiseDivide(Q, Q, Multiplicity));
  PetscCall(VecPointwiseDivide(Q_loc, Q_loc, Multiplicity_loc));

  PetscCall(DMRestoreLocalVector(dm, &Multiplicity_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Multiplicity));
  PetscCallCeed(ceed, CeedVectorDestroy(&mult_vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Record boundary values from initial condition
PetscErrorCode SetBCsFromICs(DM dm, Vec Q, Vec Q_loc) {
  PetscFunctionBeginUser;
  {  // Capture initial condition values in Qbc
    Vec Qbc;

    PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
    PetscCall(VecCopy(Q_loc, Qbc));
    PetscCall(VecZeroEntries(Q_loc));
    PetscCall(DMGlobalToLocal(dm, Q, INSERT_VALUES, Q_loc));
    PetscCall(VecAXPY(Qbc, -1., Q_loc));
    PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMPlexInsertBoundaryValues_C", DMPlexInsertBoundaryValues_FromICs));

  {  // Set boundary mask to zero out essential BCs
    Vec boundary_mask, ones;

    PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMGetGlobalVector(dm, &ones));
    PetscCall(VecZeroEntries(boundary_mask));
    PetscCall(VecSet(ones, 1.0));
    PetscCall(DMGlobalToLocal(dm, ones, INSERT_VALUES, boundary_mask));
    PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));
    PetscCall(DMRestoreGlobalVector(dm, &ones));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Insert boundary values recorded from the initial condition (composed on the DM as "DMPlexInsertBoundaryValues_C" in SetBCsFromICs)
PetscErrorCode DMPlexInsertBoundaryValues_FromICs(DM dm, PetscBool insert_essential, Vec Q_loc, PetscReal time, Vec face_geom_FVM, Vec cell_geom_FVM,
                                                  Vec grad_FVM) {
  Vec Qbc, boundary_mask;

  PetscFunctionBeginUser;
  // Mask (zero) Strong BC entries
  PetscCall(DMGetNamedLocalVector(dm, "boundary mask", &boundary_mask));
  PetscCall(VecPointwiseMult(Q_loc, Q_loc, boundary_mask));
  PetscCall(DMRestoreNamedLocalVector(dm, "boundary mask", &boundary_mask));

  PetscCall(DMGetNamedLocalVector(dm, "Qbc", &Qbc));
  PetscCall(VecAXPY(Q_loc, 1., Qbc));
  PetscCall(DMRestoreNamedLocalVector(dm, "Qbc", &Qbc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
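/*
  Usage note (a sketch, not part of the original source): once SetBCsFromICs() has composed
  DMPlexInsertBoundaryValues_FromICs() onto the DM under "DMPlexInsertBoundaryValues_C",
  PETSc invokes it whenever boundary values are inserted into a local vector. The essential-BC
  entries of Q_loc are zeroed by the boundary mask and the values saved in "Qbc" are added back,
  e.g.

    PetscCall(DMPlexInsertBoundaryValues(dm, PETSC_TRUE, Q_loc, time, NULL, NULL, NULL));

  The call above is illustrative only; in practice the time stepper triggers it internally.
*/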
// Compare reference solution values with current test run for CI
PetscErrorCode RegressionTest(AppCtx app_ctx, Vec Q) {
  Vec         Qref;
  PetscViewer viewer;
  PetscReal   error, Qrefnorm;
  MPI_Comm    comm = PetscObjectComm((PetscObject)Q);

  PetscFunctionBeginUser;
  // Read reference file
  PetscCall(VecDuplicate(Q, &Qref));
  PetscCheck(strcmp(app_ctx->test_file_path, "") != 0, comm, PETSC_ERR_FILE_READ, "File for regression test not given");
  PetscCall(PetscViewerBinaryOpen(comm, app_ctx->test_file_path, FILE_MODE_READ, &viewer));
  PetscCall(HoneeLoadBinaryVec(viewer, Qref, NULL, NULL));

  // Compute error with respect to reference solution
  PetscCall(VecAXPY(Q, -1.0, Qref));
  PetscCall(VecNorm(Qref, NORM_MAX, &Qrefnorm));
  PetscCall(VecScale(Q, 1. / Qrefnorm));
  PetscCall(VecNorm(Q, NORM_MAX, &error));

  // Check error
  if (error > app_ctx->test_tol) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Test failed with error norm %g\n", (double)error));
  }

  // Cleanup
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&Qref));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Get error for problems with exact solutions
PetscErrorCode PrintError(DM dm, Honee honee, Vec Q, PetscScalar final_time) {
  PetscInt  loc_nodes;
  Vec       Q_exact, Q_exact_loc;
  PetscReal rel_error, norm_error, norm_exact;

  PetscFunctionBeginUser;
  // Get exact solution at final time
  PetscCall(DMGetGlobalVector(dm, &Q_exact));
  PetscCall(DMGetLocalVector(dm, &Q_exact_loc));
  PetscCall(VecGetSize(Q_exact_loc, &loc_nodes));
  PetscCall(ICs_FixMultiplicity(dm, honee, Q_exact_loc, Q_exact, final_time));

  // Get |exact solution - obtained solution|
  PetscCall(VecNorm(Q_exact, NORM_1, &norm_exact));
  PetscCall(VecAXPY(Q, -1.0, Q_exact));
  PetscCall(VecNorm(Q, NORM_1, &norm_error));

  rel_error = norm_error / norm_exact;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Relative Error: %g\n", (double)rel_error));
  PetscCall(DMRestoreLocalVector(dm, &Q_exact_loc));
  PetscCall(DMRestoreGlobalVector(dm, &Q_exact));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Post-processing
PetscErrorCode PostProcess(TS ts, DM dm, ProblemData problem, Honee honee, Vec Q, PetscScalar final_time) {
  PetscInt          steps;
  TSConvergedReason reason;

  PetscFunctionBeginUser;
  // Print relative error
  if (problem->compute_exact_solution_error && honee->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PrintError(dm, honee, Q, final_time));
  }

  // Print final time and number of steps
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  if (honee->app_ctx->test_type == TESTTYPE_NONE) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Time integrator %s on time step %" PetscInt_FMT " with final time %g\n", TSConvergedReasons[reason],
                          steps, (double)final_time));
  }

  // Output numerical values from command line
  PetscCall(VecViewFromOptions(Q, NULL, "-vec_view"));

  // Compare reference solution values with current test run for CI
  if (honee->app_ctx->test_type == TESTTYPE_SOLVER) {
    PetscCall(RegressionTest(honee->app_ctx, Q));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Free a plain data context that was allocated using PETSc; returns libCEED error codes
int FreeContextPetsc(void *data) {
  if (PetscFree(data)) return CeedError(NULL, CEED_ERROR_ACCESS, "PetscFree failed");
  return CEED_ERROR_SUCCESS;
}
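/*
  Usage note (a sketch, assuming libCEED's QFunction context data-destroy hook): FreeContextPetsc()
  matches the `int (*)(void *)` callback shape libCEED expects, so PETSc-allocated QFunction context
  data can be freed by libCEED itself when the context is destroyed. For an illustrative
  CeedQFunctionContext `ctx` holding PetscMalloc'd data, registration could look like

    CeedQFunctionContextSetDataDestroy(ctx, CEED_MEM_HOST, FreeContextPetsc);

  `ctx` is hypothetical here; the actual call site depends on how the context is created elsewhere.
*/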
/**
  @brief Destroy `NodalProjectionData` object

  @param[in,out]  context  `NodalProjectionData` object to destroy
**/
PetscErrorCode NodalProjectionDataDestroy(NodalProjectionData *context) {
  NodalProjectionData context_ = *context;
  PetscFunctionBeginUser;
  if (context_ == NULL) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMDestroy(&context_->dm));
  PetscCall(KSPDestroy(&context_->ksp));
  PetscCall(OperatorApplyContextDestroy(context_->l2_rhs_ctx));
  PetscCall(PetscFree(context_));
  *context = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
  @brief Sets the value of an option if it is not already set

  @param[in,out]  options  `PetscOptions` database, or `NULL` for the default global database
  @param[in]      name     Name of the option (with `-` prepended to it)
  @param[in]      value    Value to set the option to
**/
PetscErrorCode HoneeOptionsSetValueDefault(PetscOptions options, const char name[], const char value[]) {
  PetscBool has_option;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsHasName(options, NULL, name, &has_option));
  if (!has_option) PetscCall(PetscOptionsSetValue(options, name, value));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Compute the domain volume by applying a coordinate-space mass operator to a vector of ones
PetscErrorCode HoneeCalculateDomainSize(Honee honee, PetscScalar *volume) {
  DM                   dm = honee->dm, dm_coord;
  Ceed                 ceed = honee->ceed;
  CeedQFunction        qf_mass;
  CeedOperator         op_mass;
  OperatorApplyContext op_mass_ctx;
  CeedElemRestriction  elem_restr_qd;
  CeedVector           qdata;
  CeedInt              q_data_size, num_comps_x;
  DMLabel              domain_label = 0;
  PetscInt             label_value = 0;
  Vec                  u, v;

  PetscFunctionBeginUser;
  PetscCall(DMGetCoordinateDM(dm, &dm_coord));
  PetscCall(QDataGet(ceed, dm, domain_label, label_value, honee->elem_restr_x, honee->basis_x, honee->x_coord, &elem_restr_qd, &qdata, &q_data_size));
  PetscCallCeed(ceed, CeedElemRestrictionGetNumComponents(honee->elem_restr_x, &num_comps_x));

  PetscCall(HoneeMassQFunctionCreate(ceed, num_comps_x, q_data_size, &qf_mass));
  PetscCallCeed(ceed, CeedOperatorCreate(ceed, qf_mass, NULL, NULL, &op_mass));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "u", honee->elem_restr_x, honee->basis_x, CEED_VECTOR_ACTIVE));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "qdata", elem_restr_qd, CEED_BASIS_NONE, qdata));
  PetscCallCeed(ceed, CeedOperatorSetField(op_mass, "v", honee->elem_restr_x, honee->basis_x, CEED_VECTOR_ACTIVE));

  PetscCall(OperatorApplyContextCreate(NULL, dm_coord, ceed, op_mass, NULL, NULL, NULL, NULL, &op_mass_ctx));
  PetscCall(CeedOperatorCreateLocalVecs(op_mass, DMReturnVecType(dm), PETSC_COMM_SELF, &u, NULL));
  PetscCall(DMCreateGlobalVector(dm_coord, &v));
  PetscCall(VecSet(u, 1.));
  PetscCall(ApplyCeedOperatorLocalToGlobal(u, v, op_mass_ctx));
  PetscCall(VecSum(v, volume));
  *volume /= num_comps_x;  // Correct for number of components != 1

  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&v));
  PetscCall(OperatorApplyContextDestroy(op_mass_ctx));
  PetscCallCeed(ceed, CeedElemRestrictionDestroy(&elem_restr_qd));
  PetscCallCeed(ceed, CeedVectorDestroy(&qdata));
  PetscCallCeed(ceed, CeedQFunctionDestroy(&qf_mass));
  PetscCallCeed(ceed, CeedOperatorDestroy(&op_mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}
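/*
  Usage note (a sketch, not part of the original source): HoneeCalculateDomainSize() is
  typically called after the mesh, restrictions, and bases are set up, e.g. to report or
  sanity-check the mesh volume:

    PetscScalar volume;
    PetscCall(HoneeCalculateDomainSize(honee, &volume));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Domain volume: %g\n", (double)volume));

  The printing is illustrative only; callers may use the computed volume however they need.
*/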
// Print information about the given simulation run
PetscErrorCode PrintRunInfo(Honee honee, Physics phys_ctx, ProblemData problem, TS ts) {
  Ceed     ceed = honee->ceed;
  MPI_Comm comm = PetscObjectComm((PetscObject)ts);

  PetscFunctionBeginUser;
  // Header and rank
  char        host_name[PETSC_MAX_PATH_LEN];
  PetscMPIInt rank, comm_size;
  PetscCall(PetscGetHostName(host_name, sizeof host_name));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &comm_size));
  PetscCall(PetscPrintf(comm,
                        "\n-- HONEE - High-Order Navier-Stokes Equation Evaluator --\n"
                        "  MPI:\n"
                        "    Host Name : %s\n"
                        "    Total ranks : %d\n",
                        host_name, comm_size));

  // Problem specific info
  PetscCall(problem->print_info(honee, problem, honee->app_ctx));

  // libCEED
  const char *used_resource;
  CeedMemType mem_type_backend;
  PetscCallCeed(ceed, CeedGetResource(honee->ceed, &used_resource));
  PetscCallCeed(ceed, CeedGetPreferredMemType(honee->ceed, &mem_type_backend));
  PetscCall(PetscPrintf(comm,
                        "  libCEED:\n"
                        "    libCEED Backend : %s\n"
                        "    libCEED Backend MemType : %s\n",
                        used_resource, CeedMemTypes[mem_type_backend]));
  // PETSc
  {
    VecType  vec_type;
    char     box_faces_str[PETSC_MAX_PATH_LEN] = "3,3,3";
    PetscInt dim;

    PetscCall(DMGetDimension(honee->dm, &dim));
    if (dim == 2) box_faces_str[3] = '\0';
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dm_plex_box_faces", box_faces_str, sizeof(box_faces_str), NULL));
    PetscCall(DMGetVecType(honee->dm, &vec_type));
    PetscCall(PetscPrintf(comm,
                          "  PETSc:\n"
                          "    Box Faces : %s\n"
                          "    DM VecType : %s\n"
                          "    Time Stepping Scheme : %s\n",
                          box_faces_str, vec_type, phys_ctx->implicit ? "implicit" : "explicit"));
  }
  {
    char           pmat_type_str[PETSC_MAX_PATH_LEN];
    MatType        amat_type, pmat_type;
    Mat            Amat, Pmat;
    TSIJacobianFn *ijacob_function;

    PetscCall(TSGetIJacobian(ts, &Amat, &Pmat, &ijacob_function, NULL));
    PetscCall(MatGetType(Amat, &amat_type));
    PetscCall(MatGetType(Pmat, &pmat_type));

    PetscCall(PetscStrncpy(pmat_type_str, pmat_type, sizeof(pmat_type_str)));
    if (!strcmp(pmat_type, MATCEED)) {
      MatType pmat_coo_type;
      char    pmat_coo_type_str[PETSC_MAX_PATH_LEN];

      PetscCall(MatCeedGetCOOMatType(Pmat, &pmat_coo_type));
      PetscCall(PetscSNPrintf(pmat_coo_type_str, sizeof(pmat_coo_type_str), " (COO MatType: %s)", pmat_coo_type));
      PetscCall(PetscStrlcat(pmat_type_str, pmat_coo_type_str, sizeof(pmat_type_str)));
    }
    if (ijacob_function) {
      PetscCall(PetscPrintf(comm,
                            "    IJacobian A MatType : %s\n"
                            "    IJacobian P MatType : %s\n",
                            amat_type, pmat_type_str));
    }
  }
  if (honee->app_ctx->use_continue_file) {
    PetscCall(PetscPrintf(comm,
                          "  Continue:\n"
                          "    Filename : %s\n"
                          "    Step     : %" PetscInt_FMT "\n"
                          "    Time     : %g\n",
                          honee->app_ctx->cont_file, honee->app_ctx->cont_steps, honee->app_ctx->cont_time));
  }
  // Mesh
  const PetscInt num_comp_q = 5;
  PetscInt       glob_dofs, owned_dofs, local_dofs;
  const CeedInt  num_P = honee->app_ctx->degree + 1, num_Q = num_P + honee->app_ctx->q_extra;
  PetscCall(DMGetGlobalVectorInfo(honee->dm, &owned_dofs, &glob_dofs, NULL));
  PetscCall(DMGetLocalVectorInfo(honee->dm, &local_dofs, NULL, NULL));
  PetscCall(PetscPrintf(comm,
                        "  Mesh:\n"
                        "    Number of 1D Basis Nodes (P) : %" CeedInt_FMT "\n"
                        "    Number of 1D Quadrature Points (Q) : %" CeedInt_FMT "\n"
                        "    Global DoFs : %" PetscInt_FMT "\n"
                        "    DoFs per node : %" PetscInt_FMT "\n"
                        "    Global %" PetscInt_FMT "-DoF nodes : %" PetscInt_FMT "\n",
                        num_P, num_Q, glob_dofs, num_comp_q, num_comp_q, glob_dofs / num_comp_q));
  // -- Get Partition Statistics
  PetscCall(PetscPrintf(comm, "  Partition: (min,max,median,max/median)\n"));
  {
    PetscInt *gather_buffer = NULL;
    PetscInt  part_owned_dofs[3], part_local_dofs[3], part_boundary_dofs[3], part_neighbors[3];
    PetscInt  median_index = comm_size % 2 ? comm_size / 2 : comm_size / 2 - 1;
    if (!rank) PetscCall(PetscMalloc1(comm_size, &gather_buffer));

    PetscCallMPI(MPI_Gather(&owned_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_owned_dofs[0] = gather_buffer[0];              // min
      part_owned_dofs[1] = gather_buffer[comm_size - 1];  // max
      part_owned_dofs[2] = gather_buffer[median_index];   // median
      PetscReal part_owned_dof_ratio = (PetscReal)part_owned_dofs[1] / (PetscReal)part_owned_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Global Vector %" PetscInt_FMT "-DoF nodes : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_owned_dofs[0] / num_comp_q, part_owned_dofs[1] / num_comp_q, part_owned_dofs[2] / num_comp_q, part_owned_dof_ratio));
    }

    PetscCallMPI(MPI_Gather(&local_dofs, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
    if (!rank) {
      PetscCall(PetscSortInt(comm_size, gather_buffer));
      part_local_dofs[0] = gather_buffer[0];              // min
      part_local_dofs[1] = gather_buffer[comm_size - 1];  // max
      part_local_dofs[2] = gather_buffer[median_index];   // median
      PetscReal part_local_dof_ratio = (PetscReal)part_local_dofs[1] / (PetscReal)part_local_dofs[2];
      PetscCall(PetscPrintf(
          comm, "    Local Vector %" PetscInt_FMT "-DoF nodes : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
          part_local_dofs[0] / num_comp_q, part_local_dofs[1] / num_comp_q, part_local_dofs[2] / num_comp_q, part_local_dof_ratio));
    }

    if (comm_size != 1) {
      PetscInt num_remote_roots_total = 0, num_remote_leaves_total = 0, num_ghost_interface_ranks = 0, num_owned_interface_ranks = 0;
      {
        PetscSF            sf;
        PetscMPIInt        nrranks, niranks;
        const PetscInt    *roffset, *rmine, *rremote, *ioffset, *irootloc;
        const PetscMPIInt *rranks, *iranks;
        PetscCall(DMGetSectionSF(honee->dm, &sf));
        PetscCall(PetscSFGetRootRanks(sf, &nrranks, &rranks, &roffset, &rmine, &rremote));
        PetscCall(PetscSFGetLeafRanks(sf, &niranks, &iranks, &ioffset, &irootloc));
        for (PetscInt i = 0; i < nrranks; i++) {
          if (rranks[i] == rank) continue;  // Ignore same-part global->local transfers
          num_remote_roots_total += roffset[i + 1] - roffset[i];
          num_ghost_interface_ranks++;
        }
        for (PetscInt i = 0; i < niranks; i++) {
          if (iranks[i] == rank) continue;
          num_remote_leaves_total += ioffset[i + 1] - ioffset[i];
          num_owned_interface_ranks++;
        }
      }
      PetscCallMPI(MPI_Gather(&num_remote_roots_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0] = gather_buffer[0];              // min
        part_boundary_dofs[1] = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2] = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Ghost Interface %" PetscInt_FMT "-DoF nodes : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
            part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q, part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_ghost_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0] = gather_buffer[0];              // min
        part_neighbors[1] = gather_buffer[comm_size - 1];  // max
        part_neighbors[2] = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Ghost Interface Ranks : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", part_neighbors[0],
                              part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_remote_leaves_total, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_boundary_dofs[0] = gather_buffer[0];              // min
        part_boundary_dofs[1] = gather_buffer[comm_size - 1];  // max
        part_boundary_dofs[2] = gather_buffer[median_index];   // median
        PetscReal part_shared_dof_ratio = (PetscReal)part_boundary_dofs[1] / (PetscReal)part_boundary_dofs[2];
        PetscCall(PetscPrintf(
            comm, "    Owned Interface %" PetscInt_FMT "-DoF nodes : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", num_comp_q,
            part_boundary_dofs[0] / num_comp_q, part_boundary_dofs[1] / num_comp_q, part_boundary_dofs[2] / num_comp_q, part_shared_dof_ratio));
      }

      PetscCallMPI(MPI_Gather(&num_owned_interface_ranks, 1, MPIU_INT, gather_buffer, 1, MPIU_INT, 0, comm));
      if (!rank) {
        PetscCall(PetscSortInt(comm_size, gather_buffer));
        part_neighbors[0] = gather_buffer[0];              // min
        part_neighbors[1] = gather_buffer[comm_size - 1];  // max
        part_neighbors[2] = gather_buffer[median_index];   // median
        PetscReal part_neighbors_ratio = (PetscReal)part_neighbors[1] / (PetscReal)part_neighbors[2];
        PetscCall(PetscPrintf(comm, "    Owned Interface Ranks : %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %f\n", part_neighbors[0],
                              part_neighbors[1], part_neighbors[2], part_neighbors_ratio));
      }
    }

    if (!rank) PetscCall(PetscFree(gather_buffer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}