1 // SPDX-FileCopyrightText: Copyright (c) 2017-2024, HONEE contributors. 2 // SPDX-License-Identifier: Apache-2.0 OR BSD-2-Clause 3 4 /// @file 5 /// Utilities for setting up DM and PetscFE 6 7 #include <ceed.h> 8 #include <dm-utils.h> 9 #include <petsc-ceed-utils.h> 10 #include <petscdmplex.h> 11 #include <petscds.h> 12 13 /** 14 @brief Convert `DM` field to `DS` field. 15 16 @param[in] dm `DM` holding mesh 17 @param[in] domain_label Label for `DM` domain 18 @param[in] dm_field Index of `DM` field 19 @param[out] ds_field Index of `DS` field 20 21 @return An error code: 0 - success, otherwise - failure 22 **/ 23 PetscErrorCode DMFieldToDSField(DM dm, DMLabel domain_label, PetscInt dm_field, PetscInt *ds_field) { 24 PetscDS ds; 25 IS field_is; 26 const PetscInt *fields; 27 PetscInt num_fields; 28 29 PetscFunctionBeginUser; 30 PetscCall(DMGetRegionDS(dm, domain_label, &field_is, &ds, NULL)); 31 PetscCall(ISGetIndices(field_is, &fields)); 32 PetscCall(ISGetSize(field_is, &num_fields)); 33 for (PetscInt i = 0; i < num_fields; i++) { 34 if (dm_field == fields[i]) { 35 *ds_field = i; 36 break; 37 } 38 } 39 PetscCall(ISRestoreIndices(field_is, &fields)); 40 41 PetscCheck(*ds_field != -1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Could not find dm_field %" PetscInt_FMT " in DS", dm_field); 42 PetscFunctionReturn(PETSC_SUCCESS); 43 } 44 45 /** 46 @brief Destroy `ElemRestriction` in a `PetscContainer`. 47 48 Not collective across MPI processes. 49 50 @param[in,out] ctx `CeedElemRestriction` 51 52 @return An error code: 0 - success, otherwise - failure 53 **/ 54 static PetscErrorCode DMPlexCeedElemRestrictionDestroy(void **ctx) { 55 CeedElemRestriction rstr = *(CeedElemRestriction *)ctx; 56 57 PetscFunctionBegin; 58 PetscCallCeed(CeedElemRestrictionReturnCeed(rstr), CeedElemRestrictionDestroy(&rstr)); 59 PetscFunctionReturn(PETSC_SUCCESS); 60 } 61 62 /** 63 @brief Create `CeedElemRestriction` from `DMPlex`. 
64 65 The `CeedElemRestriction` is stored in the `DMPlex` object for reuse; 66 if the routine is called again with the same arguments, the same `CeedElemRestriction` is returned. 67 68 Not collective across MPI processes. 69 70 @param[in] ceed `Ceed` context 71 @param[in] dm `DMPlex` holding mesh 72 @param[in] domain_label `DMLabel` for `DMPlex` domain 73 @param[in] label_value Stratum value 74 @param[in] height Height of `DMPlex` topology 75 @param[in] dm_field Index of `DMPlex` field 76 @param[out] restriction `CeedElemRestriction` for `DMPlex` 77 78 @return An error code: 0 - success, otherwise - failure 79 **/ 80 PetscErrorCode DMPlexCeedElemRestrictionCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, PetscInt dm_field, 81 CeedElemRestriction *restriction) { 82 char container_name[1024]; 83 CeedElemRestriction container_restriction; 84 85 PetscFunctionBeginUser; 86 { 87 const char *label_name = NULL; 88 89 if (domain_label) PetscCall(PetscObjectGetName((PetscObject)domain_label, &label_name)); 90 PetscCall(PetscSNPrintf(container_name, sizeof(container_name), 91 "DM CeedElemRestriction - label: %s label value: %" PetscInt_FMT " height: %" PetscInt_FMT " DM field: %" PetscInt_FMT, 92 label_name ? 
label_name : "NONE", label_value, height, dm_field)); 93 } 94 PetscCall(PetscObjectContainerQuery((PetscObject)dm, container_name, (void **)&container_restriction)); 95 96 if (container_restriction) { 97 // Copy existing restriction 98 *restriction = NULL; 99 PetscCallCeed(ceed, CeedElemRestrictionReferenceCopy(container_restriction, restriction)); 100 } else { 101 PetscInt num_elem, elem_size, num_dof, num_comp, *restriction_offsets_petsc; 102 CeedInt *restriction_offsets_ceed = NULL; 103 104 // Build restriction 105 PetscCall(DMPlexGetLocalOffsets(dm, domain_label, label_value, height, dm_field, &num_elem, &elem_size, &num_comp, &num_dof, 106 &restriction_offsets_petsc)); 107 PetscCall(IntArrayPetscToCeed(num_elem * elem_size, &restriction_offsets_petsc, &restriction_offsets_ceed)); 108 PetscCallCeed(ceed, CeedElemRestrictionCreate(ceed, num_elem, elem_size, num_comp, 1, num_dof, CEED_MEM_HOST, CEED_COPY_VALUES, 109 restriction_offsets_ceed, restriction)); 110 PetscCall(PetscFree(restriction_offsets_ceed)); 111 112 // Set in container 113 PetscCallCeed(ceed, CeedElemRestrictionReferenceCopy(*restriction, &container_restriction)); 114 PetscCall(PetscObjectContainerCompose((PetscObject)dm, container_name, container_restriction, DMPlexCeedElemRestrictionDestroy)); 115 } 116 PetscFunctionReturn(PETSC_SUCCESS); 117 } 118 119 /** 120 @brief Create `CeedElemRestriction` from `DMPlex` domain for mesh coordinates. 121 122 The `CeedElemRestriction` is stored in the coordinate `DMPlex` object for reuse; 123 if the routine is called again with the same arguments, the same `CeedElemRestriction` is returned. 124 125 Not collective across MPI processes. 
126 127 @param[in] ceed `Ceed` context 128 @param[in] dm `DMPlex` holding mesh 129 @param[in] domain_label Label for `DMPlex` domain 130 @param[in] label_value Stratum value 131 @param[in] height Height of `DMPlex` topology 132 @param[out] restriction `CeedElemRestriction` for mesh 133 134 @return An error code: 0 - success, otherwise - failure 135 **/ 136 PetscErrorCode DMPlexCeedElemRestrictionCoordinateCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, 137 CeedElemRestriction *restriction) { 138 DM dm_coord; 139 140 PetscFunctionBeginUser; 141 PetscCall(DMGetCellCoordinateDM(dm, &dm_coord)); 142 if (!dm_coord) { 143 PetscCall(DMGetCoordinateDM(dm, &dm_coord)); 144 } 145 PetscCall(DMPlexCeedElemRestrictionCreate(ceed, dm_coord, domain_label, label_value, height, 0, restriction)); 146 PetscFunctionReturn(PETSC_SUCCESS); 147 } 148 149 /** 150 @brief Create `CeedElemRestriction` from `DMPlex` domain for auxilury `QFunction` data. 151 152 Not collective across MPI processes. 
153 154 @param[in] ceed `Ceed` context 155 @param[in] dm `DMPlex` holding mesh 156 @param[in] domain_label Label for `DMPlex` domain 157 @param[in] label_value Stratum value 158 @param[in] height Height of `DMPlex` topology 159 @param[in] q_data_size Number of components for `QFunction` data 160 @param[in] is_collocated Boolean flag indicating if the data is collocated on the nodes (`PETSC_TRUE`) or on quadrature points (`PETSC_FALSE`) 161 @param[out] restriction Strided `CeedElemRestriction` for `QFunction` data 162 163 @return An error code: 0 - success, otherwise - failure 164 **/ 165 static PetscErrorCode DMPlexCeedElemRestrictionStridedCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, 166 PetscInt q_data_size, PetscBool is_collocated, CeedElemRestriction *restriction) { 167 PetscInt num_elem, num_qpts, dm_field = 0; 168 169 PetscFunctionBeginUser; 170 { // Get number of elements 171 PetscInt depth; 172 DMLabel depth_label; 173 IS point_is, depth_is; 174 175 PetscCall(DMPlexGetDepth(dm, &depth)); 176 PetscCall(DMPlexGetDepthLabel(dm, &depth_label)); 177 PetscCall(DMLabelGetStratumIS(depth_label, depth - height, &depth_is)); 178 if (domain_label) { 179 IS domain_is; 180 181 PetscCall(DMLabelGetStratumIS(domain_label, label_value, &domain_is)); 182 if (domain_is) { 183 PetscCall(ISIntersect(depth_is, domain_is, &point_is)); 184 PetscCall(ISDestroy(&domain_is)); 185 } else { 186 point_is = NULL; 187 } 188 PetscCall(ISDestroy(&depth_is)); 189 } else { 190 point_is = depth_is; 191 } 192 if (point_is) { 193 PetscCall(ISGetLocalSize(point_is, &num_elem)); 194 } else { 195 num_elem = 0; 196 } 197 PetscCall(ISDestroy(&point_is)); 198 } 199 200 { // Get number of quadrature points 201 PetscDS ds; 202 PetscFE fe; 203 PetscInt ds_field = -1; 204 205 PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL)); 206 PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field)); 207 PetscCall(PetscDSGetDiscretization(ds, ds_field, 
(PetscObject *)&fe)); 208 PetscCall(PetscFEGetHeightSubspace(fe, height, &fe)); 209 if (is_collocated) { 210 PetscDualSpace dual_space; 211 PetscInt num_dual_basis_vectors, dim, num_comp; 212 213 PetscCall(PetscFEGetSpatialDimension(fe, &dim)); 214 PetscCall(PetscFEGetNumComponents(fe, &num_comp)); 215 PetscCall(PetscFEGetDualSpace(fe, &dual_space)); 216 PetscCall(PetscDualSpaceGetDimension(dual_space, &num_dual_basis_vectors)); 217 num_qpts = num_dual_basis_vectors / num_comp; 218 } else { 219 PetscQuadrature quadrature; 220 221 PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL)); 222 PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field)); 223 PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe)); 224 PetscCall(PetscFEGetHeightSubspace(fe, height, &fe)); 225 PetscCall(PetscFEGetQuadrature(fe, &quadrature)); 226 PetscCall(PetscQuadratureGetData(quadrature, NULL, NULL, &num_qpts, NULL, NULL)); 227 } 228 } 229 230 // Create the restriction 231 PetscCallCeed(ceed, CeedElemRestrictionCreateStrided(ceed, num_elem, num_qpts, q_data_size, num_elem * num_qpts * q_data_size, CEED_STRIDES_BACKEND, 232 restriction)); 233 PetscFunctionReturn(PETSC_SUCCESS); 234 } 235 236 /** 237 @brief Create `CeedElemRestriction` from `DMPlex` domain for auxilury `QFunction` data. 238 239 Not collective across MPI processes. 
240 241 @param[in] ceed `Ceed` context 242 @param[in] dm `DMPlex` holding mesh 243 @param[in] domain_label Label for `DMPlex` domain 244 @param[in] label_value Stratum value 245 @param[in] height Height of `DMPlex` topology 246 @param[in] q_data_size Number of components for `QFunction` data 247 @param[out] restriction Strided `CeedElemRestriction` for `QFunction` data 248 249 @return An error code: 0 - success, otherwise - failure 250 **/ 251 PetscErrorCode DMPlexCeedElemRestrictionQDataCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, 252 PetscInt q_data_size, CeedElemRestriction *restriction) { 253 PetscFunctionBeginUser; 254 PetscCall(DMPlexCeedElemRestrictionStridedCreate(ceed, dm, domain_label, label_value, height, q_data_size, PETSC_FALSE, restriction)); 255 PetscFunctionReturn(PETSC_SUCCESS); 256 } 257 258 /** 259 @brief Create `CeedElemRestriction` from `DMPlex` domain for nodally collocated auxilury `QFunction` data. 260 261 Not collective across MPI processes. 262 263 @param[in] ceed `Ceed` context 264 @param[in] dm `DMPlex` holding mesh 265 @param[in] domain_label Label for `DMPlex` domain 266 @param[in] label_value Stratum value 267 @param[in] height Height of `DMPlex` topology 268 @param[in] q_data_size Number of components for `QFunction` data 269 @param[out] restriction Strided `CeedElemRestriction` for `QFunction` data 270 271 @return An error code: 0 - success, otherwise - failure 272 **/ 273 PetscErrorCode DMPlexCeedElemRestrictionCollocatedCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, 274 PetscInt q_data_size, CeedElemRestriction *restriction) { 275 PetscFunctionBeginUser; 276 PetscCall(DMPlexCeedElemRestrictionStridedCreate(ceed, dm, domain_label, label_value, height, q_data_size, PETSC_TRUE, restriction)); 277 PetscFunctionReturn(PETSC_SUCCESS); 278 } 279 280 /** 281 @brief Creates a tensor-product uniform quadrature. 
282 This is only for comparison studies and generally should not be used. 283 284 @param[in] dim Spatial dimension 285 @param[in] num_comp Number of components 286 @param[in] num_points Number of points in one dimension 287 @param[in] a Left end of interval (often -1) 288 @param[in] b Right end of interval (often +1) 289 @param[out] q `PetscQuadrature` object 290 291 @return An error code: 0 - success, otherwise - failure 292 **/ 293 static PetscErrorCode PetscDTUniformTensorQuadrature(PetscInt dim, PetscInt num_comp, PetscInt num_points, PetscReal a, PetscReal b, 294 PetscQuadrature *q) { 295 DMPolytopeType ct; 296 PetscInt num_total_points = dim > 1 ? (dim > 2 ? (num_points * num_points * num_points) : (num_points * num_points)) : num_points; 297 PetscReal *coords, *weights, *coords_1d; 298 299 PetscFunctionBeginUser; 300 PetscCall(PetscMalloc1(num_total_points * dim, &coords)); 301 PetscCall(PetscMalloc1(num_total_points * num_comp, &weights)); 302 // Compute weights, points 303 switch (dim) { 304 case 0: 305 ct = DM_POLYTOPE_POINT; 306 PetscCall(PetscFree(coords)); 307 PetscCall(PetscFree(weights)); 308 PetscCall(PetscMalloc1(1, &coords)); 309 PetscCall(PetscMalloc1(num_comp, &weights)); 310 num_total_points = 1; 311 coords[0] = 0.0; 312 for (PetscInt c = 0; c < num_comp; c++) weights[c] = 1.0; 313 break; 314 case 1: { 315 ct = DM_POLYTOPE_SEGMENT; 316 PetscReal step = (b - a) / num_points; 317 318 for (PetscInt i = 0; i < num_points; i++) { 319 coords[i] = step * (i + 0.5) + a; 320 for (PetscInt c = 0; c < num_comp; c++) weights[i * num_comp + c] = 1.0; 321 } 322 } break; 323 case 2: { 324 ct = DM_POLYTOPE_QUADRILATERAL; 325 PetscCall(PetscMalloc1(num_points, &coords_1d)); 326 PetscReal step = (b - a) / num_points; 327 328 for (PetscInt i = 0; i < num_points; i++) coords_1d[i] = step * (i + 0.5) + a; 329 for (PetscInt i = 0; i < num_points; i++) { 330 for (PetscInt j = 0; j < num_points; j++) { 331 coords[(i * num_points + j) * dim + 0] = coords_1d[i]; 332 
coords[(i * num_points + j) * dim + 1] = coords_1d[j]; 333 for (PetscInt c = 0; c < num_comp; c++) weights[(i * num_points + j) * num_comp + c] = 1.0; 334 } 335 } 336 PetscCall(PetscFree(coords_1d)); 337 } break; 338 case 3: { 339 ct = DM_POLYTOPE_HEXAHEDRON; 340 PetscCall(PetscMalloc1(num_points, &coords_1d)); 341 PetscReal step = (b - a) / num_points; 342 343 for (PetscInt i = 0; i < num_points; i++) coords_1d[i] = step * (i + 0.5) + a; 344 for (PetscInt i = 0; i < num_points; i++) { 345 for (PetscInt j = 0; j < num_points; j++) { 346 for (PetscInt k = 0; k < num_points; k++) { 347 coords[((i * num_points + j) * num_points + k) * dim + 0] = coords_1d[i]; 348 coords[((i * num_points + j) * num_points + k) * dim + 1] = coords_1d[j]; 349 coords[((i * num_points + j) * num_points + k) * dim + 2] = coords_1d[k]; 350 for (PetscInt c = 0; c < num_comp; c++) weights[((i * num_points + j) * num_points + k) * num_comp + c] = 1.0; 351 } 352 } 353 } 354 PetscCall(PetscFree(coords_1d)); 355 } break; 356 // LCOV_EXCL_START 357 default: 358 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cannot construct quadrature rule for dimension %" PetscInt_FMT, dim); 359 // LCOV_EXCL_STOP 360 } 361 PetscCall(PetscQuadratureCreate(PETSC_COMM_SELF, q)); 362 PetscCall(PetscQuadratureSetCellType(*q, ct)); 363 PetscCall(PetscQuadratureSetOrder(*q, num_points)); 364 PetscCall(PetscQuadratureSetData(*q, dim, num_comp, num_total_points, coords, weights)); 365 PetscCall(PetscObjectChangeTypeName((PetscObject)*q, "UniformTensor")); 366 PetscFunctionReturn(PETSC_SUCCESS); 367 } 368 369 /** 370 @brief Setup `DM` with FE space of appropriate degree 371 372 @param[in] comm MPI communicator 373 @param[in] dim Spatial dimension 374 @param[in] num_comp Number of components 375 @param[in] is_simplex Flag for simplex or tensor product element 376 @param[in] order Polynomial order of space 377 @param[in] q_order Quadrature order 378 @param[in] prefix The options prefix, or `NULL` 379 @param[out] fem 
               `PetscFE` object

  @return An error code: 0 - success, otherwise - failure
**/
PetscErrorCode PetscFECreateLagrangeFromOptions(MPI_Comm comm, PetscInt dim, PetscInt num_comp, PetscBool is_simplex, PetscInt order,
                                                PetscInt q_order, const char prefix[], PetscFE *fem) {
  PetscBool       is_tensor     = !is_simplex;
  DMPolytopeType  polytope_type = DMPolytopeTypeSimpleShape(dim, is_simplex);
  PetscSpace      fe_space;
  PetscDualSpace  fe_dual_space;
  PetscQuadrature quadrature, face_quadrature;

  PetscFunctionBeginUser;
  // Create space
  PetscCall(PetscSpaceCreate(comm, &fe_space));
  PetscCall(PetscSpaceSetType(fe_space, PETSCSPACEPOLYNOMIAL));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fe_space, prefix));
  PetscCall(PetscSpacePolynomialSetTensor(fe_space, is_tensor));
  PetscCall(PetscSpaceSetNumComponents(fe_space, num_comp));
  PetscCall(PetscSpaceSetNumVariables(fe_space, dim));
  if (order >= 0) {
    PetscCall(PetscSpaceSetDegree(fe_space, order, PETSC_DETERMINE));
    // Triangular prisms need a tensor space built from a 2D simplex ("end") space and a 1D ("side") space
    if (polytope_type == DM_POLYTOPE_TRI_PRISM || polytope_type == DM_POLYTOPE_TRI_PRISM_TENSOR) {
      PetscSpace fe_space_end, fe_space_side;

      // Build the prism space as a scalar space first; vector components are handled by a sum space below
      PetscCall(PetscSpaceSetNumComponents(fe_space, 1));
      // 2D simplex space on the prism ends
      PetscCall(PetscSpaceCreate(comm, &fe_space_end));
      PetscCall(PetscSpaceSetType(fe_space_end, PETSCSPACEPOLYNOMIAL));
      PetscCall(PetscSpacePolynomialSetTensor(fe_space_end, PETSC_FALSE));
      PetscCall(PetscSpaceSetNumComponents(fe_space_end, 1));
      PetscCall(PetscSpaceSetNumVariables(fe_space_end, dim - 1));
      PetscCall(PetscSpaceSetDegree(fe_space_end, order, PETSC_DETERMINE));
      // 1D space along the prism axis
      PetscCall(PetscSpaceCreate(comm, &fe_space_side));
      PetscCall(PetscSpaceSetType(fe_space_side, PETSCSPACEPOLYNOMIAL));
      PetscCall(PetscSpacePolynomialSetTensor(fe_space_side, PETSC_FALSE));
      PetscCall(PetscSpaceSetNumComponents(fe_space_side, 1));
      PetscCall(PetscSpaceSetNumVariables(fe_space_side, 1));
      PetscCall(PetscSpaceSetDegree(fe_space_side, order, PETSC_DETERMINE));
      // Combine end x side into the prism tensor space; the tensor space takes its own references
      PetscCall(PetscSpaceSetType(fe_space, PETSCSPACETENSOR));
      PetscCall(PetscSpaceTensorSetNumSubspaces(fe_space, 2));
      PetscCall(PetscSpaceTensorSetSubspace(fe_space, 0, fe_space_end));
      PetscCall(PetscSpaceTensorSetSubspace(fe_space, 1, fe_space_side));
      PetscCall(PetscSpaceDestroy(&fe_space_end));
      PetscCall(PetscSpaceDestroy(&fe_space_side));

      if (num_comp > 1) {
        // Vector-valued prism space: interleaved sum of num_comp copies of the scalar prism space
        PetscSpace scalar_fe_space = fe_space;

        PetscCall(PetscSpaceCreate(comm, &fe_space));
        PetscCall(PetscSpaceSetNumVariables(fe_space, dim));
        PetscCall(PetscSpaceSetNumComponents(fe_space, num_comp));
        PetscCall(PetscSpaceSetType(fe_space, PETSCSPACESUM));
        PetscCall(PetscSpaceSumSetNumSubspaces(fe_space, num_comp));
        PetscCall(PetscSpaceSumSetConcatenate(fe_space, PETSC_TRUE));
        PetscCall(PetscSpaceSumSetInterleave(fe_space, PETSC_TRUE, PETSC_FALSE));
        // The sum space takes its own references to the shared scalar subspace
        for (PetscInt i = 0; i < num_comp; i++) PetscCall(PetscSpaceSumSetSubspace(fe_space, i, scalar_fe_space));
        PetscCall(PetscSpaceDestroy(&scalar_fe_space));
      }
    }
  }
  PetscCall(PetscSpaceSetFromOptions(fe_space));
  PetscCall(PetscSpaceSetUp(fe_space));
  // Re-read order/tensor/components in case command-line options changed them
  PetscCall(PetscSpaceGetDegree(fe_space, &order, NULL));
  PetscCall(PetscSpacePolynomialGetTensor(fe_space, &is_tensor));
  PetscCall(PetscSpaceGetNumComponents(fe_space, &num_comp));

  // Create dual space
  PetscCall(PetscDualSpaceCreate(comm, &fe_dual_space));
  PetscCall(PetscDualSpaceSetType(fe_dual_space, PETSCDUALSPACELAGRANGE));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)fe_dual_space, prefix));
  {
    DM dual_space_dm;

    PetscCall(DMPlexCreateReferenceCell(PETSC_COMM_SELF, polytope_type, &dual_space_dm));
    PetscCall(PetscDualSpaceSetDM(fe_dual_space, dual_space_dm));
    PetscCall(DMDestroy(&dual_space_dm));
  }
  PetscCall(PetscDualSpaceSetNumComponents(fe_dual_space, num_comp));
  PetscCall(PetscDualSpaceSetOrder(fe_dual_space, order));
  PetscCall(PetscDualSpaceLagrangeSetTensor(fe_dual_space, (is_tensor || (polytope_type == DM_POLYTOPE_TRI_PRISM)) ? PETSC_TRUE : PETSC_FALSE));
  PetscCall(PetscDualSpaceSetFromOptions(fe_dual_space));
  PetscCall(PetscDualSpaceSetUp(fe_dual_space));

  // Create quadrature
  q_order = q_order >= 0 ? q_order : order;
  PetscBool use_uniform = PETSC_FALSE;

  PetscOptionsBegin(comm, NULL, "Uniform quadrature check", NULL);
  PetscCall(PetscOptionsBool("-petscdt_uniform_tensor_quadrature", "", NULL, use_uniform, &use_uniform, NULL));
  PetscOptionsEnd();
  PetscCheck(!use_uniform || (is_tensor || dim == 1), comm, PETSC_ERR_SUP, "Can only use uniform quadrature on tensor product elements");
  if (use_uniform) {
    // Uniform (midpoint) quadrature, for comparison studies only
    PetscCall(PetscDTUniformTensorQuadrature(dim, 1, q_order + 1, -1.0, 1.0, &quadrature));
    PetscCall(PetscDTUniformTensorQuadrature(dim - 1, 1, q_order + 1, -1.0, 1.0, &face_quadrature));
  } else {
    PetscCall(PetscDTCreateDefaultQuadrature(polytope_type, q_order, &quadrature, &face_quadrature));
  }

  // Create finite element
  PetscCall(PetscFECreateFromSpaces(fe_space, fe_dual_space, quadrature, face_quadrature, fem));
  PetscCall(PetscFESetFromOptions(*fem));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
  @brief Get global `DMPlex` topology type.

  Collective across MPI processes.
487 488 @param[in] dm `DMPlex` holding mesh 489 @param[in] domain_label `DMLabel` for `DMPlex` domain 490 @param[in] label_value Stratum value 491 @param[in] height Height of `DMPlex` topology 492 @param[out] cell_type `DMPlex` topology type 493 **/ 494 static inline PetscErrorCode GetGlobalDMPlexPolytopeType(DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, 495 DMPolytopeType *cell_type) { 496 PetscInt first_point; 497 PetscInt ids[1] = {label_value}; 498 DMLabel depth_label; 499 500 PetscFunctionBeginUser; 501 // Use depth label if no domain label present 502 if (!domain_label) { 503 PetscInt depth; 504 505 PetscCall(DMPlexGetDepth(dm, &depth)); 506 PetscCall(DMPlexGetDepthLabel(dm, &depth_label)); 507 ids[0] = depth - height; 508 } 509 510 // Get cell interp, grad, and quadrature data 511 PetscCall(DMGetFirstLabeledPoint(dm, dm, domain_label ? domain_label : depth_label, 1, ids, height, &first_point, NULL)); 512 if (first_point != -1) PetscCall(DMPlexGetCellType(dm, first_point, cell_type)); 513 514 { // Form agreement about CellType 515 PetscInt cell_type_local = -1, cell_type_global = -1; 516 517 if (first_point != -1) cell_type_local = (PetscInt)*cell_type; 518 PetscCallMPI(MPIU_Allreduce(&cell_type_local, &cell_type_global, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)dm))); 519 *cell_type = (DMPolytopeType)cell_type_global; 520 } 521 PetscFunctionReturn(PETSC_SUCCESS); 522 } 523 524 /** 525 @brief Get the permutation and field offset for a given depth. 526 527 Not collective across MPI processes. 

  @param[in]   dm            `DMPlex` holding mesh
  @param[in]   depth         Depth of `DMPlex` topology
  @param[in]   field         Index of `DMPlex` field
  @param[out]  permutation   Permutation for `DMPlex` field
  @param[out]  field_offset  Offset to `DMPlex` field
**/
static inline PetscErrorCode GetClosurePermutationAndFieldOffsetAtDepth(DM dm, PetscInt depth, PetscInt field, IS *permutation,
                                                                        PetscInt *field_offset) {
  PetscBool    is_field_continuous = PETSC_TRUE;
  PetscInt     dim, num_fields, offset = 0, size = 0;
  PetscSection section;

  PetscFunctionBeginUser;
  *field_offset = 0;

  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetLocalSection(dm, &section));
  PetscCall(PetscSectionGetNumFields(section, &num_fields));
  // ---- Permutation size and offset to current field
  // Sum the per-field closure sizes of all continuous fields; discontinuous fields do not
  // participate in the closure permutation and are skipped
  for (PetscInt f = 0; f < num_fields; f++) {
    PetscBool      is_continuous;
    PetscInt       num_components, num_dof_1d, dual_space_size, field_size;
    PetscObject    obj;
    PetscFE        fe = NULL;
    PetscDualSpace dual_space;

    PetscCall(PetscSectionGetFieldComponents(section, f, &num_components));
    PetscCall(DMGetField(dm, f, NULL, &obj));
    fe = (PetscFE)obj;
    PetscCall(PetscFEGetDualSpace(fe, &dual_space));
    PetscCall(PetscDualSpaceLagrangeGetContinuity(dual_space, &is_continuous));
    if (f == field) is_field_continuous = is_continuous;
    if (!is_continuous) continue;
    PetscCall(PetscDualSpaceGetDimension(dual_space, &dual_space_size));
    // Per-dimension node count; assumes a tensor-product element where
    // dual_space_size == num_components * num_dof_1d^dim -- TODO confirm for non-tensor elements
    num_dof_1d = (PetscInt)PetscCeilReal(PetscPowReal(dual_space_size / num_components, 1.0 / dim));
    // Size of this field's closure at the requested depth
    field_size = PetscPowInt(num_dof_1d, depth) * num_components;
    if (f < field) offset += field_size;
    size += field_size;
  }

  // Only continuous fields have a closure permutation; *permutation is left untouched otherwise
  if (is_field_continuous) {
    *field_offset = offset;
    PetscCall(PetscSectionGetClosurePermutation(section, (PetscObject)dm, depth, size, permutation));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
  @brief Compute field tabulation from
         `PetscTabulation`.

  Not collective across MPI processes.

  @param[in]   dm          `DMPlex` holding mesh
  @param[in]   field       Index of `DMPlex` field
  @param[in]   face        Index of basis face, or 0
  @param[in]   tabulation  PETSc basis tabulation
  @param[out]  interp      Interpolation matrix in libCEED orientation
  @param[out]  grad        Gradient matrix in libCEED orientation

  @note `interp` and `grad` are allocated using `PetscCalloc1` and must be freed by the user.
**/
static inline PetscErrorCode ComputeFieldTabulationP2C(DM dm, PetscInt field, PetscInt face, PetscTabulation tabulation, CeedScalar **interp,
                                                       CeedScalar **grad) {
  PetscBool       is_simplex, has_permutation = PETSC_FALSE;
  PetscInt        field_offset = 0, num_comp, P, Q, dim;
  const PetscInt *permutation_indices;
  IS              permutation = NULL;

  PetscFunctionBeginUser;
  // Tabulation dimensions: Nb = P * num_comp basis functions, Np quadrature points, cdim spatial dimension
  num_comp = tabulation->Nc;
  P        = tabulation->Nb / num_comp;
  Q        = tabulation->Np;
  dim      = tabulation->cdim;

  PetscCall(PetscCalloc1(P * Q, interp));
  PetscCall(PetscCalloc1(P * Q * dim, grad));

  // Tensor-product (non-simplex) elements use a closure permutation to map PETSc node ordering to libCEED ordering
  PetscCall(DMPlexIsSimplex(dm, &is_simplex));
  if (!is_simplex) {
    PetscCall(GetClosurePermutationAndFieldOffsetAtDepth(dm, dim, field, &permutation, &field_offset));
    has_permutation = !!permutation;
    if (has_permutation) PetscCall(ISGetIndices(permutation, &permutation_indices));
  }

  // Only component 0 is tabulated; components share the same scalar basis
  const CeedInt c = 0;
  for (CeedInt q = 0; q < Q; q++) {
    for (CeedInt p_ceed = 0; p_ceed < P; p_ceed++) {
      // Map libCEED node index to the PETSc node index via the closure permutation (identity when none)
      CeedInt p_petsc = has_permutation ? (permutation_indices[field_offset + p_ceed * num_comp] - field_offset) : (p_ceed * num_comp);
      // T[0] holds basis values, T[1] holds derivatives, both indexed (face, q, basis, comp[, dim])
      (*interp)[q * P + p_ceed] = tabulation->T[0][((face * Q + q) * P * num_comp + p_petsc) * num_comp + c];
      for (CeedInt d = 0; d < dim; d++) {
        (*grad)[(d * Q + q) * P + p_ceed] = tabulation->T[1][(((face * Q + q) * P * num_comp + p_petsc) * num_comp + c) * dim + d];
      }
    }
  }

  if (has_permutation) PetscCall(ISRestoreIndices(permutation, &permutation_indices));
  PetscCall(ISDestroy(&permutation));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/**
  @brief Get quadrature data from `PetscQuadrature` for use with libCEED.

  Not collective across MPI processes.

  @param[in]   fe          `PetscFE` object
  @param[in]   quadrature  PETSc basis quadrature
  @param[out]  q_dim       Quadrature dimension, or NULL if not needed
  @param[out]  num_comp    Number of components, or NULL if not needed
  @param[out]  Q           Number of quadrature points, or NULL if not needed
  @param[out]  q_points    Quadrature points in libCEED orientation, or NULL if not needed
  @param[out]  q_weights   Quadrature weights in libCEED orientation, or NULL if not needed

  @note `q_weights` and `q_points` are allocated using `PetscCalloc1` and must be freed by the user.
643 **/ 644 static inline PetscErrorCode GetQuadratureDataP2C(PetscFE fe, PetscQuadrature quadrature, PetscInt *q_dim, PetscInt *num_comp, PetscInt *Q, 645 CeedScalar **q_points, CeedScalar **q_weights) { 646 PetscInt spatial_dim, dim, num_comp_quadrature, num_q_points; 647 const PetscReal *q_points_petsc, *q_weights_petsc; 648 649 PetscFunctionBeginUser; 650 PetscCall(PetscFEGetSpatialDimension(fe, &spatial_dim)); 651 PetscCall(PetscQuadratureGetData(quadrature, &dim, &num_comp_quadrature, &num_q_points, &q_points_petsc, &q_weights_petsc)); 652 if (q_dim) *q_dim = dim; 653 if (num_comp) *num_comp = num_comp_quadrature; 654 if (Q) *Q = num_q_points; 655 if (q_weights) { 656 PetscSizeT q_weights_size = num_q_points; 657 658 PetscCall(PetscCalloc1(q_weights_size, q_weights)); 659 for (CeedInt i = 0; i < num_q_points; i++) (*q_weights)[i] = q_weights_petsc[i]; 660 } 661 if (q_points) { 662 PetscSizeT q_points_size = num_q_points * spatial_dim; 663 664 PetscCall(PetscCalloc1(q_points_size, q_points)); 665 for (CeedInt i = 0; i < num_q_points; i++) { 666 for (CeedInt d = 0; d < dim; d++) (*q_points)[i * spatial_dim + d] = q_points_petsc[i * dim + d]; 667 } 668 } 669 PetscFunctionReturn(PETSC_SUCCESS); 670 } 671 672 /** 673 @brief Create 1D tabulation from `PetscFE`. 674 675 Collective across MPI processes. 676 677 @note `q_weights` and `q_points` are allocated using `PetscCalloc1` and must be freed by the user. 
678 679 @param[in] comm `MPI_Comm` for creating `FE` object from options 680 @param[in] fe PETSc basis `FE` object 681 @param[out] tabulation PETSc basis tabulation 682 @param[out] q_points_1d Quadrature points in libCEED orientation 683 @param[out] q_weights_1d Quadrature weights in libCEED orientation 684 685 @return An error code: 0 - success, otherwise - failure 686 **/ 687 static inline PetscErrorCode Create1DTabulation_Tensor(MPI_Comm comm, PetscFE fe, PetscTabulation *tabulation, PetscReal **q_points_1d, 688 CeedScalar **q_weights_1d) { 689 PetscFE fe_1d; 690 PetscInt dim, num_comp, Q = -1, q_dim = -1, num_derivatives = 1; 691 PetscQuadrature quadrature; 692 693 PetscFunctionBeginUser; 694 // Create 1D FE 695 PetscCall(PetscFEGetNumComponents(fe, &num_comp)); 696 PetscCall(PetscFEGetSpatialDimension(fe, &dim)); 697 { 698 const char *prefix; 699 PetscBool is_tensor; 700 PetscInt num_comp, order, q_order; 701 PetscDualSpace dual_space; 702 PetscQuadrature quadrature; 703 704 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)fe, &prefix)); 705 PetscCall(PetscFEGetDualSpace(fe, &dual_space)); 706 PetscCall(PetscFEGetNumComponents(fe, &num_comp)); 707 PetscCall(PetscDualSpaceLagrangeGetTensor(dual_space, &is_tensor)); 708 PetscCall(PetscDualSpaceGetOrder(dual_space, &order)); 709 PetscCall(PetscFEGetQuadrature(fe, &quadrature)); 710 PetscCall(PetscQuadratureGetData(quadrature, NULL, NULL, &q_order, NULL, NULL)); 711 PetscCall(PetscFECreateLagrangeFromOptions(comm, 1, num_comp, !is_tensor, order, 712 (PetscInt)PetscCeilReal(PetscPowReal(1.0 * q_order, 1.0 / dim)) - 1, prefix, &fe_1d)); 713 } 714 715 // Get quadrature data 716 PetscCall(PetscFEGetQuadrature(fe_1d, &quadrature)); 717 PetscCall(GetQuadratureDataP2C(fe_1d, quadrature, &q_dim, NULL, &Q, q_points_1d, q_weights_1d)); 718 719 // Compute 1D tabulation 720 PetscCall(PetscFECreateTabulation(fe_1d, 1, Q, *q_points_1d, num_derivatives, tabulation)); 721 722 // Cleanup 723 PetscCall(PetscFEDestroy(&fe_1d)); 
724 725 PetscFunctionReturn(PETSC_SUCCESS); 726 } 727 728 /** 729 @brief Destroy `CeedBasis` in a `PetscContainer`. 730 731 Not collective across MPI processes. 732 733 @param[in,out] ctx `CeedBasis` 734 735 @return An error code: 0 - success, otherwise - failure 736 **/ 737 static PetscErrorCode DMPlexCeedBasisDestroy(void **ctx) { 738 CeedBasis basis = *(CeedBasis *)ctx; 739 740 PetscFunctionBegin; 741 PetscCallCeed(CeedBasisReturnCeed(basis), CeedBasisDestroy(&basis)); 742 PetscFunctionReturn(PETSC_SUCCESS); 743 } 744 745 /** 746 @brief Create `CeedBasis` from `DMPlex`. 747 748 The `CeedBasis` is stored in the `DMPlex` object for reuse; 749 if the routine is called again with the same arguments, the same `CeedBasis` is returned. 750 751 Collective across MPI processes. 752 753 @param[in] ceed `Ceed` context 754 @param[in] dm `DMPlex` holding mesh 755 @param[in] domain_label `DMLabel` for `DMPlex` domain 756 @param[in] label_value Stratum value 757 @param[in] height Height of `DMPlex` topology 758 @param[in] dm_field Index of `DMPlex` field 759 @param[out] basis `CeedBasis` for `DMPlex` 760 761 @return An error code: 0 - success, otherwise - failure 762 **/ 763 PetscErrorCode DMPlexCeedBasisCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, PetscInt dm_field, 764 CeedBasis *basis) { 765 MPI_Comm comm = PetscObjectComm((PetscObject)dm); 766 char container_name[1024]; 767 CeedBasis container_basis; 768 769 PetscFunctionBeginUser; 770 { 771 const char *label_name = NULL; 772 773 if (domain_label) PetscCall(PetscObjectGetName((PetscObject)domain_label, &label_name)); 774 PetscCall(PetscSNPrintf(container_name, sizeof(container_name), 775 "DM CeedBasis - label: %s label value: %" PetscInt_FMT " height: %" PetscInt_FMT " DM field: %" PetscInt_FMT, 776 label_name ? 
label_name : "NONE", label_value, height, dm_field)); 777 } 778 PetscCall(PetscObjectContainerQuery((PetscObject)dm, container_name, (void **)&container_basis)); 779 780 if (container_basis) { 781 // Copy existing basis 782 *basis = NULL; 783 PetscCallCeed(ceed, CeedBasisReferenceCopy(container_basis, basis)); 784 } else { 785 PetscDS ds; 786 PetscFE fe; 787 PetscDualSpace dual_space; 788 PetscQuadrature quadrature; 789 PetscBool is_tensor = PETSC_TRUE; 790 PetscInt ds_field = -1; 791 792 // Get element information 793 PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL)); 794 PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field)); 795 PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe)); 796 PetscCall(PetscFEGetHeightSubspace(fe, height, &fe)); 797 PetscCall(PetscFEGetQuadrature(fe, &quadrature)); 798 PetscCall(PetscFEGetDualSpace(fe, &dual_space)); 799 PetscCall(PetscDualSpaceLagrangeGetTensor(dual_space, &is_tensor)); 800 801 // Build libCEED basis 802 PetscBool use_nontensor = PETSC_FALSE; 803 804 PetscOptionsBegin(comm, NULL, "Tensor Basis as Nontensor Check", NULL); 805 PetscCall(PetscOptionsBool("-ceed_basis_as_nontensor", "", NULL, use_nontensor, &use_nontensor, NULL)); 806 PetscOptionsEnd(); 807 808 if (!is_tensor || use_nontensor) { 809 PetscTabulation basis_tabulation; 810 PetscInt num_derivatives = 1, face = 0; 811 CeedScalar *q_points, *q_weights, *interp, *grad; 812 CeedElemTopology elem_topo; 813 814 { 815 DMPolytopeType cell_type; 816 817 PetscCall(GetGlobalDMPlexPolytopeType(dm, domain_label, label_value, height, &cell_type)); 818 elem_topo = PolytopeTypePetscToCeed(cell_type); 819 PetscCheck(elem_topo, comm, PETSC_ERR_SUP, "DMPlex topology not supported: %s", DMPolytopeTypes[cell_type]); 820 } 821 822 // Compute basis tabulation 823 PetscCall(GetQuadratureDataP2C(fe, quadrature, NULL, NULL, NULL, &q_points, &q_weights)); 824 PetscCall(PetscFEGetCellTabulation(fe, num_derivatives, &basis_tabulation)); 825 
PetscCall(ComputeFieldTabulationP2C(dm, dm_field, face, basis_tabulation, &interp, &grad)); 826 { 827 PetscInt num_comp = basis_tabulation->Nc, P = basis_tabulation->Nb / num_comp, Q = basis_tabulation->Np; 828 829 PetscCallCeed(ceed, CeedBasisCreateH1(ceed, elem_topo, num_comp, P, Q, interp, grad, q_points, q_weights, basis)); 830 } 831 832 PetscCall(PetscFree(q_points)); 833 PetscCall(PetscFree(q_weights)); 834 PetscCall(PetscFree(interp)); 835 PetscCall(PetscFree(grad)); 836 } else { 837 PetscInt P_1d, Q_1d, num_comp, dim; 838 PetscTabulation basis_tabulation; 839 CeedScalar *q_points_1d, *q_weights_1d, *interp_1d, *grad_1d; 840 841 PetscCall(PetscFEGetSpatialDimension(fe, &dim)); 842 PetscCall(Create1DTabulation_Tensor(comm, fe, &basis_tabulation, &q_points_1d, &q_weights_1d)); 843 num_comp = basis_tabulation->Nc; 844 P_1d = basis_tabulation->Nb / num_comp; 845 Q_1d = basis_tabulation->Np; 846 PetscCall(ComputeFieldTabulationP2C(dm, dm_field, 0, basis_tabulation, &interp_1d, &grad_1d)); 847 PetscCallCeed(ceed, CeedBasisCreateTensorH1(ceed, dim, num_comp, P_1d, Q_1d, interp_1d, grad_1d, q_points_1d, q_weights_1d, basis)); 848 849 // Cleanup 850 PetscCall(PetscFree(q_points_1d)); 851 PetscCall(PetscFree(q_weights_1d)); 852 PetscCall(PetscFree(interp_1d)); 853 PetscCall(PetscFree(grad_1d)); 854 PetscCall(PetscTabulationDestroy(&basis_tabulation)); 855 } 856 // Set in container 857 PetscCallCeed(ceed, CeedBasisReferenceCopy(*basis, &container_basis)); 858 PetscCall(PetscObjectContainerCompose((PetscObject)dm, container_name, container_basis, DMPlexCeedBasisDestroy)); 859 } 860 PetscFunctionReturn(PETSC_SUCCESS); 861 } 862 863 /** 864 @brief Create `CeedBasis` for coordinate space of `DMPlex`. 865 866 The `CeedBasis` is stored in the coordinate `DMPlex` object for reuse; 867 if the routine is called again with the same arguments, the same `CeedBasis` is returned. 868 869 Collective across MPI processes. 
870 871 @param[in] ceed `Ceed` context 872 @param[in] dm `DMPlex` holding mesh 873 @param[in] domain_label `DMLabel` for `DMPlex` domain 874 @param[in] label_value Stratum value 875 @param[in] height Height of `DMPlex` topology 876 @param[out] basis `CeedBasis` for coordinate space of `DMPlex` 877 **/ 878 PetscErrorCode DMPlexCeedBasisCoordinateCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt height, CeedBasis *basis) { 879 DM dm_coord = NULL; 880 881 PetscFunctionBeginUser; 882 PetscCall(DMGetCellCoordinateDM(dm, &dm_coord)); 883 if (!dm_coord) PetscCall(DMGetCoordinateDM(dm, &dm_coord)); 884 PetscCall(DMPlexCeedBasisCreate(ceed, dm_coord, domain_label, label_value, height, 0, basis)); 885 PetscFunctionReturn(PETSC_SUCCESS); 886 } 887 888 /** 889 @brief Create `CeedBasis` for cell to face quadrature space evaluation from `DMPlex`. 890 891 Collective across MPI processes. 892 893 @param[in] ceed `Ceed` context 894 @param[in] dm `DMPlex` holding mesh 895 @param[in] domain_label `DMLabel` for `DMPlex` domain 896 @param[in] label_value Stratum value 897 @param[in] face Index of face 898 @param[in] dm_field Index of `DMPlex` field 899 @param[out] basis `CeedBasis` for cell to face evaluation 900 **/ 901 PetscErrorCode DMPlexCeedBasisCellToFaceCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt face, PetscInt dm_field, 902 CeedBasis *basis) { 903 PetscInt ds_field = -1, height = 0; 904 PetscDS ds; 905 PetscFE fe; 906 PetscQuadrature face_quadrature; 907 908 PetscFunctionBeginUser; 909 // Get element information 910 PetscCall(DMGetRegionDS(dm, domain_label, NULL, &ds, NULL)); 911 PetscCall(DMFieldToDSField(dm, domain_label, dm_field, &ds_field)); 912 PetscCall(PetscDSGetDiscretization(ds, ds_field, (PetscObject *)&fe)); 913 PetscCall(PetscFEGetFaceQuadrature(fe, &face_quadrature)); 914 915 { // Build libCEED basis 916 PetscInt num_derivatives = 1, num_comp, P, Q = -1; 917 PetscTabulation basis_tabulation; 918 
CeedScalar *q_points, *q_weights, *interp, *grad; 919 CeedElemTopology elem_topo; 920 921 { // Verify compatible element topology 922 DMPolytopeType cell_type; 923 924 PetscCall(GetGlobalDMPlexPolytopeType(dm, domain_label, label_value, height, &cell_type)); 925 elem_topo = PolytopeTypePetscToCeed(cell_type); 926 PetscCheck(elem_topo, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "DMPlex topology not supported: %s", DMPolytopeTypes[cell_type]); 927 } 928 929 // Compute basis tabulation 930 PetscCall(GetQuadratureDataP2C(fe, face_quadrature, NULL, NULL, &Q, &q_points, &q_weights)); 931 PetscCall(PetscFEGetFaceTabulation(fe, num_derivatives, &basis_tabulation)); 932 num_comp = basis_tabulation->Nc; 933 P = basis_tabulation->Nb / num_comp; 934 PetscCall(ComputeFieldTabulationP2C(dm, dm_field, face, basis_tabulation, &interp, &grad)); 935 PetscCallCeed(ceed, CeedBasisCreateH1(ceed, elem_topo, num_comp, P, Q, interp, grad, q_points, q_weights, basis)); 936 937 PetscCall(PetscFree(q_points)); 938 PetscCall(PetscFree(q_weights)); 939 PetscCall(PetscFree(interp)); 940 PetscCall(PetscFree(grad)); 941 } 942 PetscFunctionReturn(PETSC_SUCCESS); 943 } 944 945 /** 946 @brief Create `CeedBasis` for cell to face quadrature space evaluation on coordinate space of `DMPlex`. 947 948 Collective across MPI processes. 
949 950 @param[in] ceed `Ceed` context 951 @param[in] dm `DMPlex` holding mesh 952 @param[in] domain_label `DMLabel` for `DMPlex` domain 953 @param[in] label_value Stratum value 954 @param[in] face Index of face 955 @param[out] basis `CeedBasis` for cell to face evaluation on coordinate space of `DMPlex` 956 **/ 957 PetscErrorCode DMPlexCeedBasisCellToFaceCoordinateCreate(Ceed ceed, DM dm, DMLabel domain_label, PetscInt label_value, PetscInt face, 958 CeedBasis *basis) { 959 DM dm_coord; 960 961 PetscFunctionBeginUser; 962 PetscCall(DMGetCellCoordinateDM(dm, &dm_coord)); 963 if (!dm_coord) { 964 PetscCall(DMGetCoordinateDM(dm, &dm_coord)); 965 } 966 PetscCall(DMPlexCeedBasisCellToFaceCreate(ceed, dm_coord, domain_label, label_value, face, 0, basis)); 967 PetscFunctionReturn(PETSC_SUCCESS); 968 } 969 970 /** 971 @brief Setup `DM` with FE space of appropriate degree 972 973 Must be followed by `DMSetupByOrderEnd_FEM` 974 975 @param[in] setup_faces Flag to setup face geometry 976 @param[in] setup_coords Flag to setup coordinate spaces 977 @param[in] degree Polynomial orders of field 978 @param[in] coord_order Polynomial order of coordinate basis, or `PETSC_DECIDE` for default 979 @param[in] q_extra Additional quadrature order 980 @param[in] num_fields Number of fields in solution vector 981 @param[in] field_sizes Array of number of components for each field 982 @param[out] dm `DM` to setup 983 984 @return An error code: 0 - success, otherwise - failure 985 **/ 986 PetscErrorCode DMSetupByOrderBegin_FEM(PetscBool setup_faces, PetscBool setup_coords, PetscInt degree, PetscInt coord_order, PetscInt q_extra, 987 PetscInt num_fields, const PetscInt *field_sizes, DM dm) { 988 PetscInt dim, q_order = degree + q_extra; 989 PetscBool is_simplex = PETSC_TRUE; 990 PetscFE fe; 991 MPI_Comm comm = PetscObjectComm((PetscObject)dm); 992 993 PetscFunctionBeginUser; 994 PetscCall(DMPlexIsSimplex(dm, &is_simplex)); 995 996 // Setup DM 997 PetscCall(DMGetDimension(dm, &dim)); 998 for 
(PetscInt i = 0; i < num_fields; i++) { 999 PetscFE fe_face; 1000 PetscInt q_order = degree + q_extra; 1001 1002 PetscCall(PetscFECreateLagrange(comm, dim, field_sizes[i], is_simplex, degree, q_order, &fe)); 1003 if (setup_faces) PetscCall(PetscFEGetHeightSubspace(fe, 1, &fe_face)); 1004 PetscCall(DMAddField(dm, NULL, (PetscObject)fe)); 1005 PetscCall(PetscFEDestroy(&fe)); 1006 } 1007 PetscCall(DMCreateDS(dm)); 1008 1009 // Project coordinates to enrich quadrature space 1010 if (setup_coords) { 1011 DM dm_coord; 1012 PetscDS ds_coord; 1013 PetscFE fe_coord_current, fe_coord_new, fe_coord_face_new; 1014 PetscDualSpace fe_coord_dual_space; 1015 PetscInt fe_coord_order, num_comp_coord; 1016 1017 PetscCall(DMGetCoordinateDM(dm, &dm_coord)); 1018 PetscCall(DMGetCoordinateDim(dm, &num_comp_coord)); 1019 PetscCall(DMGetRegionDS(dm_coord, NULL, NULL, &ds_coord, NULL)); 1020 PetscCall(PetscDSGetDiscretization(ds_coord, 0, (PetscObject *)&fe_coord_current)); 1021 PetscCall(PetscFEGetDualSpace(fe_coord_current, &fe_coord_dual_space)); 1022 PetscCall(PetscDualSpaceGetOrder(fe_coord_dual_space, &fe_coord_order)); 1023 1024 // Create FE for coordinates 1025 if (coord_order != PETSC_DECIDE) fe_coord_order = coord_order; 1026 PetscCall(PetscFECreateLagrange(comm, dim, num_comp_coord, is_simplex, fe_coord_order, q_order, &fe_coord_new)); 1027 if (setup_faces) PetscCall(PetscFEGetHeightSubspace(fe_coord_new, 1, &fe_coord_face_new)); 1028 PetscCall(DMSetCoordinateDisc(dm, fe_coord_new, PETSC_FALSE, PETSC_TRUE)); 1029 PetscCall(PetscFEDestroy(&fe_coord_new)); 1030 } 1031 PetscFunctionReturn(PETSC_SUCCESS); 1032 } 1033 1034 /** 1035 @brief Finish setting up `DM` 1036 1037 Must be called after `DMSetupByOrderBegin_FEM` and all strong BCs have be declared via `DMAddBoundaries`. 
1038 1039 @param[in] setup_coords Flag to setup coordinate spaces 1040 @param[out] dm `DM` to setup 1041 1042 @return An error code: 0 - success, otherwise - failure 1043 **/ 1044 PetscErrorCode DMSetupByOrderEnd_FEM(PetscBool setup_coords, DM dm) { 1045 PetscBool is_simplex; 1046 1047 PetscFunctionBeginUser; 1048 PetscCall(DMPlexIsSimplex(dm, &is_simplex)); 1049 // Set tensor permutation if needed 1050 if (!is_simplex) { 1051 PetscCall(DMPlexSetClosurePermutationTensor(dm, PETSC_DETERMINE, NULL)); 1052 if (setup_coords) { 1053 DM dm_coord; 1054 1055 PetscCall(DMGetCoordinateDM(dm, &dm_coord)); 1056 PetscCall(DMPlexSetClosurePermutationTensor(dm_coord, PETSC_DETERMINE, NULL)); 1057 } 1058 } 1059 PetscCall(DMLocalizeCoordinates(dm)); // Must localize after tensor closure setting 1060 PetscFunctionReturn(PETSC_SUCCESS); 1061 } 1062 1063 /** 1064 @brief Setup `DM` with FE space of appropriate degree with no boundary conditions 1065 1066 Calls `DMSetupByOrderBegin_FEM` and `DMSetupByOrderEnd_FEM` successively 1067 1068 @param[in] setup_faces Flag to setup face geometry 1069 @param[in] setup_coords Flag to setup coordinate spaces 1070 @param[in] degree Polynomial orders of field 1071 @param[in] coord_order Polynomial order of coordinate basis, or `PETSC_DECIDE` for default 1072 @param[in] q_extra Additional quadrature order 1073 @param[in] num_fields Number of fields in solution vector 1074 @param[in] field_sizes Array of number of components for each field 1075 @param[out] dm `DM` to setup 1076 1077 @return An error code: 0 - success, otherwise - failure 1078 **/ 1079 PetscErrorCode DMSetupByOrder_FEM(PetscBool setup_faces, PetscBool setup_coords, PetscInt degree, PetscInt coord_order, PetscInt q_extra, 1080 PetscInt num_fields, const PetscInt *field_sizes, DM dm) { 1081 PetscFunctionBeginUser; 1082 PetscCall(DMSetupByOrderBegin_FEM(setup_faces, setup_coords, degree, coord_order, q_extra, num_fields, field_sizes, dm)); 1083 PetscCall(DMSetupByOrderEnd_FEM(setup_coords, 
dm)); 1084 PetscFunctionReturn(PETSC_SUCCESS); 1085 } 1086 1087 /** 1088 @brief Get the points in a label stratum which have requested height 1089 1090 Example, from the "Face Sets" label (which contains face, edge, and vertex points), return points that correspond to face points (height = 1) and have a label value of 3: 1091 ``` 1092 PetscCall(DMGetStratumISAtHeight(dm, "Face Sets", 3, 1, &is_face_points)); 1093 ``` 1094 1095 @param[in] dm DM which has the label 1096 @param[in] name Name fo the label 1097 @param[in] value Label value of points to return 1098 @param[in] height Height of points to return 1099 @param[out] points `IS` of points, or `NULL` is there are no points in that stratum at height 1100 **/ 1101 static PetscErrorCode DMGetStratumISAtHeight(DM dm, const char name[], PetscInt value, PetscInt height, IS *points) { 1102 PetscInt depth; 1103 DMLabel depth_label; 1104 IS label_is, depth_is; 1105 1106 PetscFunctionBeginUser; 1107 PetscCall(DMPlexGetDepth(dm, &depth)); 1108 PetscCall(DMPlexGetDepthLabel(dm, &depth_label)); 1109 PetscCall(DMLabelGetStratumIS(depth_label, depth - height, &depth_is)); 1110 PetscCall(DMGetStratumIS(dm, name, value, &label_is)); 1111 if (label_is == NULL || depth_is == NULL) *points = NULL; 1112 else PetscCall(ISIntersect(depth_is, label_is, points)); 1113 PetscCall(ISDestroy(&depth_is)); 1114 PetscCall(ISDestroy(&label_is)); 1115 PetscFunctionReturn(PETSC_SUCCESS); 1116 } 1117 1118 /** 1119 @brief Create a label with separate values for the `FE` orientations for all cells with this material for the `DMPlex` face. 1120 1121 Collective across MPI processes. 
1122 1123 @param[in,out] dm `DMPlex` holding mesh 1124 @param[in] dm_face Face number on `DMPlex` 1125 @param[out] face_label_name Label name for newly created label, or `NULL` if no elements match the `material` and `dm_face` 1126 **/ 1127 PetscErrorCode DMPlexCreateFaceLabel(DM dm, PetscInt dm_face, char **face_label_name) { 1128 PetscInt num_face_points = 0, face_height = 1; 1129 const PetscInt *face_points; 1130 IS is_face_points; 1131 DMLabel face_label = NULL; 1132 MPI_Comm comm = PetscObjectComm((PetscObject)dm); 1133 1134 PetscFunctionBeginUser; 1135 { // Create label name and label 1136 PetscSizeT label_name_len = 64; 1137 PetscBool has_label_already; 1138 1139 // Face label 1140 PetscCall(PetscCalloc1(label_name_len, face_label_name)); 1141 PetscCall(PetscSNPrintf(*face_label_name, label_name_len, "Orientation for DM face %" PetscInt_FMT, dm_face)); 1142 PetscCall(DMHasLabel(dm, *face_label_name, &has_label_already)); 1143 if (has_label_already) PetscFunctionReturn(PETSC_SUCCESS); 1144 1145 PetscCall(DMCreateLabel(dm, *face_label_name)); 1146 PetscCall(DMGetLabel(dm, *face_label_name, &face_label)); 1147 } 1148 1149 PetscCall(DMGetStratumISAtHeight(dm, "Face Sets", dm_face, face_height, &is_face_points)); 1150 if (is_face_points) { 1151 PetscCall(ISGetLocalSize(is_face_points, &num_face_points)); 1152 PetscCall(ISGetIndices(is_face_points, &face_points)); 1153 } 1154 1155 for (PetscInt face_index = 0; face_index < num_face_points; face_index++) { 1156 const PetscInt *face_support, *cell_cone; 1157 PetscInt face_support_size = 0, cell_cone_size = 0, fe_face_on_cell = -1; 1158 PetscInt face_point = face_points[face_index]; 1159 1160 // Get cell supporting face 1161 PetscCall(DMPlexGetSupport(dm, face_point, &face_support)); 1162 PetscCall(DMPlexGetSupportSize(dm, face_point, &face_support_size)); 1163 PetscCheck(face_support_size == 1, comm, PETSC_ERR_SUP, "Expected one cell in support of exterior face, but got %" PetscInt_FMT " cells", 1164 
face_support_size); 1165 PetscInt cell_point = face_support[0]; 1166 1167 // Find face number 1168 PetscCall(DMPlexGetCone(dm, cell_point, &cell_cone)); 1169 PetscCall(DMPlexGetConeSize(dm, cell_point, &cell_cone_size)); 1170 for (PetscInt i = 0; i < cell_cone_size; i++) { 1171 if (cell_cone[i] == face_point) fe_face_on_cell = i; 1172 } 1173 PetscCheck(fe_face_on_cell != -1, comm, PETSC_ERR_SUP, "Could not find face %" PetscInt_FMT " in cone of its support", face_point); 1174 1175 // Add to label 1176 PetscCall(DMLabelSetValue(face_label, face_point, fe_face_on_cell)); 1177 } 1178 PetscCall(DMPlexLabelAddFaceCells(dm, face_label)); 1179 PetscCall(ISDestroy(&is_face_points)); 1180 PetscFunctionReturn(PETSC_SUCCESS); 1181 } 1182 1183 /** 1184 @brief Creates an array of all the values set for a `DMLabel` 1185 1186 Because `DMLabelGetValueIS` returns label values locally, not globally. 1187 1188 Collective across MPI processes. 1189 1190 @param[in] dm `DM` with the label 1191 @param[in] label `DMLabel` to get values of 1192 @param[out] num_values Total number of values 1193 @param[out] value_array Array of label values, must be freed by user 1194 **/ 1195 PetscErrorCode DMLabelCreateGlobalValueArray(DM dm, DMLabel label, PetscInt *num_values, PetscInt **value_array) { 1196 PetscInt num_values_local, minmax_values[2], minmax_values_loc[2]; 1197 PetscInt *values_local = NULL; 1198 MPI_Comm comm = PetscObjectComm((PetscObject)dm); 1199 PetscSegBuffer seg; 1200 PetscCount seg_size; 1201 1202 PetscFunctionBeginUser; 1203 { // Extract array of local DMLabel values so it can be sorted 1204 IS is_values; 1205 const PetscInt *is_values_local = NULL; 1206 1207 PetscCall(DMLabelGetValueIS(label, &is_values)); 1208 PetscCall(ISGetLocalSize(is_values, &num_values_local)); 1209 1210 PetscCall(ISGetIndices(is_values, &is_values_local)); 1211 PetscCall(PetscMalloc1(num_values_local, &values_local)); 1212 PetscCall(PetscArraycpy(values_local, is_values_local, num_values_local)); 1213 
PetscCall(ISRestoreIndices(is_values, &is_values_local)); 1214 PetscCall(ISDestroy(&is_values)); 1215 } 1216 1217 if (num_values_local) { 1218 PetscCall(PetscSortInt(num_values_local, values_local)); 1219 minmax_values_loc[0] = values_local[0]; 1220 minmax_values_loc[1] = values_local[num_values_local - 1]; 1221 } else { 1222 // Rank has no set label values, therefore set local min/max to have no effect on global min/max 1223 minmax_values_loc[0] = PETSC_INT_MAX; 1224 minmax_values_loc[1] = PETSC_INT_MIN; 1225 } 1226 PetscCall(PetscGlobalMinMaxInt(comm, minmax_values_loc, minmax_values)); 1227 1228 PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 16, &seg)); 1229 for (PetscInt value = minmax_values[0]; value <= minmax_values[1]; value++) { 1230 PetscInt location_local = -1, location_global = -1; 1231 1232 PetscCall(PetscFindInt(value, num_values_local, values_local, &location_local)); 1233 PetscCallMPI(MPIU_Allreduce(&location_local, &location_global, 1, MPIU_INT, MPI_MAX, comm)); 1234 if (location_global < 0) continue; 1235 else { 1236 PetscInt *seg_slot; 1237 1238 PetscCall(PetscSegBufferGetInts(seg, 1, &seg_slot)); 1239 *seg_slot = value; 1240 } 1241 } 1242 PetscCall(PetscSegBufferGetSize(seg, &seg_size)); 1243 *num_values = (PetscInt)seg_size; 1244 PetscCall(PetscSegBufferExtractAlloc(seg, value_array)); 1245 PetscCall(PetscSegBufferDestroy(&seg)); 1246 if (values_local) PetscCall(PetscFree(values_local)); 1247 PetscFunctionReturn(PETSC_SUCCESS); 1248 } 1249 1250 /** 1251 @brief Get the number of components for `dm`'s coordinate field 1252 1253 @param[in] dm DM 1254 @param[out] num_comp Number of components for the coordinate field 1255 **/ 1256 PetscErrorCode DMGetCoordinateNumComps(DM dm, PetscInt *num_comp) { 1257 DM dm_coord; 1258 PetscSection section_coord; 1259 PetscInt field = 0; // Default field has the coordinates 1260 1261 PetscFunctionBeginUser; 1262 PetscCall(DMGetCoordinateDM(dm, &dm_coord)); 1263 PetscCall(DMGetLocalSection(dm_coord, §ion_coord)); 
1264 PetscCall(PetscSectionGetFieldComponents(section_coord, field, num_comp)); 1265 PetscFunctionReturn(PETSC_SUCCESS); 1266 } 1267 1268 /** 1269 @brief Get the number of components for `dm`'s field 1270 1271 @param[in] dm DM 1272 @param[in] field The field ID to get the number of components for 1273 @param[out] num_comp Number of components for the coordinate field 1274 **/ 1275 PetscErrorCode DMGetFieldNumComps(DM dm, PetscInt field, PetscInt *num_comp) { 1276 PetscSection section; 1277 1278 PetscFunctionBeginUser; 1279 PetscCall(DMGetLocalSection(dm, §ion)); 1280 PetscCall(PetscSectionGetFieldComponents(section, field, num_comp)); 1281 PetscFunctionReturn(PETSC_SUCCESS); 1282 } 1283