static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";

#include <petsc/private/dmpleximpl.h>
#include <petscviewerhdf5.h>
#include <petscsf.h>

typedef struct {
  PetscBool         compare;                    /* Compare the meshes using DMPlexEqual() */
  PetscBool         compare_labels;             /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool         distribute;                 /* Distribute the mesh */
  PetscBool         field;                      /* Lay out a field over the mesh */
  PetscBool         reorder;                    /* Reorder the points in the section */
  char              ofname[PETSC_MAX_PATH_LEN]; /* Output mesh filename */
  PetscViewerFormat format;                     /* Format to write and read */
  PetscBool         second_write_read;          /* Write and read for the 2nd time */
  PetscBool         use_low_level_functions;    /* Use low level functions for viewing and loading */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  options->compare                 = PETSC_FALSE;
  options->compare_labels          = PETSC_FALSE;
  options->distribute              = PETSC_TRUE;
  options->field                   = PETSC_FALSE;
  options->reorder                 = PETSC_FALSE;
  options->format                  = PETSC_VIEWER_DEFAULT;
  options->second_write_read       = PETSC_FALSE;
  options->use_low_level_functions = PETSC_FALSE;
  PetscCall(PetscStrncpy(options->ofname, "ex55.h5", sizeof(options->ofname)));

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL));
  PetscCall(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsBool("-field", "Lay out a field over the mesh", "ex55.c", options->field, &options->field, NULL));
  PetscCall(PetscOptionsBool("-reorder", "Reorder the points in the section", "ex55.c", options->reorder, &options->reorder, NULL));
  PetscCall(PetscOptionsString("-ofilename", "The output mesh file", "ex55.c", options->ofname, options->ofname, sizeof(options->ofname), NULL));
  PetscCall(PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum *)&options->format, NULL));
  PetscCall(PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL));
  PetscCall(PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm)
{
  PetscFunctionBeginUser;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetOptionsPrefix(*dm, "orig_"));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Attach a single-component, degree-1 Lagrange field to the DM; this is called on both the original
   and the loaded mesh so that VecLoad() lays the solution out over the same section. */
static PetscErrorCode CreateDiscretization(DM dm)
{
  PetscFE        fe;
  DMPolytopeType ct;
  PetscInt       dim, cStart;

  PetscFunctionBeginUser;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL));
  PetscCall(DMPlexGetCellType(dm, cStart, &ct));
  PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, ct, 1, PETSC_DETERMINE, &fe));
  PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe));
  PetscCall(PetscFEDestroy(&fe));
  PetscCall(DMCreateDS(dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
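
/* The helpers below check the state of a DM against expectations. CheckDistributed() and
   CheckInterpolated() verify the parallel-distribution and interpolation state, respectively, and
   CheckDistributedInterpolated() derives the expected state of a freshly loaded DM from the viewer
   format: hdf5_xdmf/hdf5_viz loads are distributed but not interpolated, while default/native/
   hdf5_petsc loads are distributed only when the storage version's major number is at least 3. */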

static PetscErrorCode CheckDistributed(DM dm, PetscBool expectedDistributed)
{
  PetscMPIInt size;
  PetscBool   distributed;
  const char  YES[] = "DISTRIBUTED";
  const char  NO[]  = "NOT DISTRIBUTED";

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  if (size < 2) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMPlexIsDistributed(dm, &distributed));
  PetscCheck(distributed == expectedDistributed, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected DM to be %s but it is %s", expectedDistributed ? YES : NO, distributed ? YES : NO);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CheckInterpolated(DM dm, PetscBool expectedInterpolated)
{
  DMPlexInterpolatedFlag iflg;
  PetscBool              interpolated;
  const char             YES[] = "INTERPOLATED";
  const char             NO[]  = "NOT INTERPOLATED";

  PetscFunctionBeginUser;
  PetscCall(DMPlexIsInterpolatedCollective(dm, &iflg));
  interpolated = (PetscBool)(iflg == DMPLEX_INTERPOLATED_FULL);
  PetscCheck(interpolated == expectedInterpolated, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected DM to be %s but it is %s", expectedInterpolated ? YES : NO, interpolated ? YES : NO);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CheckDistributedInterpolated(DM dm, PetscBool expectedInterpolated, PetscViewer v, AppCtx *user)
{
  PetscViewerFormat format;
  PetscBool         distributed, interpolated = expectedInterpolated;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerGetFormat(v, &format));
  switch (format) {
  case PETSC_VIEWER_HDF5_XDMF:
  case PETSC_VIEWER_HDF5_VIZ: {
    distributed  = PETSC_TRUE;
    interpolated = PETSC_FALSE;
  }; break;
  case PETSC_VIEWER_HDF5_PETSC:
  case PETSC_VIEWER_DEFAULT:
  case PETSC_VIEWER_NATIVE: {
    DMPlexStorageVersion version;

    PetscCall(PetscViewerHDF5GetDMPlexStorageVersionReading(v, &version));
    distributed = (PetscBool)(version->major >= 3);
  }; break;
  default:
    distributed = PETSC_FALSE;
  }
  PetscCall(CheckDistributed(dm, distributed));
  PetscCall(CheckInterpolated(dm, interpolated));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, Vec vec, const char filename[], const char prefix[], PetscBool expectedInterpolated, AppCtx *user, DM *dm_new, Vec *v_new)
{
  DM          dmnew;
  Vec         vnew = NULL;
  const char  savedName[]  = "Mesh";
  const char  loadedName[] = "Mesh_new";
  PetscViewer v;

  PetscFunctionBeginUser;
  PetscCall(PetscViewerHDF5Open(PetscObjectComm((PetscObject)dm), filename, FILE_MODE_WRITE, &v));
  PetscCall(PetscViewerPushFormat(v, user->format));
  PetscCall(PetscObjectSetName((PetscObject)dm, savedName));
  if (user->use_low_level_functions) {
    PetscCall(DMPlexTopologyView(dm, v));
    PetscCall(DMPlexCoordinatesView(dm, v));
    PetscCall(DMPlexLabelsView(dm, v));
  } else {
    PetscCall(DMView(dm, v));
    if (vec) PetscCall(VecView(vec, v));
  }
  PetscCall(PetscViewerFileSetMode(v, FILE_MODE_READ));
  PetscCall(DMCreate(PETSC_COMM_WORLD, &dmnew));
  PetscCall(DMSetType(dmnew, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(dmnew, PETSC_FALSE));
  PetscCall(PetscObjectSetName((PetscObject)dmnew, savedName));
  PetscCall(DMSetOptionsPrefix(dmnew, prefix));
  if (user->use_low_level_functions) {
    PetscSF sfXC;

    PetscCall(DMPlexTopologyLoad(dmnew, v, &sfXC));
    PetscCall(DMPlexCoordinatesLoad(dmnew, v, sfXC));
    PetscCall(DMPlexLabelsLoad(dmnew, v, sfXC));
    PetscCall(PetscSFDestroy(&sfXC));
  } else {
    PetscCall(DMLoad(dmnew, v));
    if (vec) {
      PetscCall(CreateDiscretization(dmnew));
      PetscCall(DMCreateGlobalVector(dmnew, &vnew));
      PetscCall(PetscObjectSetName((PetscObject)vnew, "solution"));
      PetscCall(VecLoad(vnew, v));
    }
  }
  PetscCall(CheckDistributedInterpolated(dmnew, expectedInterpolated, v, user));
  PetscCall(PetscObjectSetName((PetscObject)dmnew, loadedName));
  PetscCall(PetscViewerPopFormat(v));
  PetscCall(PetscViewerDestroy(&v));
  *dm_new = dmnew;
  *v_new  = vnew;
  PetscFunctionReturn(PETSC_SUCCESS);
}
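
/* Example invocation (the mesh file name below is only a placeholder; the configurations actually
   exercised are listed in the TEST block at the end of this file):

     mpiexec -n 2 ./ex55 -orig_dm_plex_filename mesh.exo -format hdf5_petsc -second_write_read -compare -compare_labels
*/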

int main(int argc, char **argv)
{
  DM               dm, dmnew;
  Vec              v = NULL, vnew = NULL;
  PetscPartitioner part;
  AppCtx           user;
  PetscBool        interpolated = PETSC_TRUE, flg;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &dm));
  PetscCall(PetscOptionsGetBool(NULL, "orig_", "-dm_plex_interpolate", &interpolated, NULL));
  PetscCall(CheckInterpolated(dm, interpolated));

  if (user.distribute) {
    DM dmdist;

    PetscCall(DMPlexGetPartitioner(dm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
    if (dmdist) {
      PetscCall(DMDestroy(&dm));
      dm = dmdist;
      PetscCall(CheckDistributed(dm, PETSC_TRUE));
      PetscCall(CheckInterpolated(dm, interpolated));
    }
  }
  if (user.field) {
    PetscSection gs;
    PetscScalar *a;
    PetscInt     pStart, pEnd, rStart;

    PetscCall(CreateDiscretization(dm));

    PetscCall(DMCreateGlobalVector(dm, &v));
    PetscCall(PetscObjectSetName((PetscObject)v, "solution"));
    PetscCall(DMGetGlobalSection(dm, &gs));
    PetscCall(PetscSectionGetChart(gs, &pStart, &pEnd));
    PetscCall(VecGetOwnershipRange(v, &rStart, NULL));
    PetscCall(VecGetArrayWrite(v, &a));
    /* Fill each dof with its point number; points owned by other ranks have a negative global offset and are skipped */
    for (PetscInt p = pStart; p < pEnd; ++p) {
      PetscInt dof, off;

      PetscCall(PetscSectionGetDof(gs, p, &dof));
      PetscCall(PetscSectionGetOffset(gs, p, &off));
      if (off < 0) continue;
      for (PetscInt d = 0; d < dof; ++d) a[off - rStart + d] = p;
    }
    PetscCall(VecRestoreArrayWrite(v, &a));
  }

  PetscCall(DMSetOptionsPrefix(dm, NULL));
  PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  PetscCall(DMPlexWriteAndReadHDF5(dm, v, user.ofname, "new_", interpolated, &user, &dmnew, &vnew));

  if (user.second_write_read) {
    PetscCall(DMDestroy(&dm));
    dm = dmnew;
    PetscCall(DMPlexWriteAndReadHDF5(dm, v, user.ofname, "new_", interpolated, &user, &dmnew, &vnew));
  }

  PetscCall(DMViewFromOptions(dmnew, NULL, "-dm_view"));

  /* This comparison currently makes sense only for sequential meshes. */
  if (user.compare) {
    PetscCall(DMPlexEqual(dmnew, dm, &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMs equal\n"));
    if (v) {
      PetscCall(VecEqual(vnew, v, &flg));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Vecs %s\n", flg ? "equal" : "are not equal"));
      PetscCall(VecViewFromOptions(v, NULL, "-old_vec_view"));
      PetscCall(VecViewFromOptions(vnew, NULL, "-new_vec_view"));
    }
  }
  if (user.compare_labels) {
    PetscCall(DMCompareLabels(dmnew, dm, NULL, NULL));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMLabels equal\n"));
  }

  PetscCall(VecDestroy(&v));
  PetscCall(DMDestroy(&dm));
  PetscCall(VecDestroy(&vnew));
  PetscCall(DMDestroy(&dmnew));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  build:
    requires: hdf5
  # Idempotence of saving/loading
  # Have to replace the Exodus file, which creates uninterpolated edges
  test:
    suffix: 0
    TODO: broken
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -compare
  test:
    suffix: 1
    TODO: broken
    requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -petscpartitioner_type parmetis
    args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail

  testset:
    requires: exodusii
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    test:
      suffix: 2
      nsize: {{1 2 4 8}separate output}
      args: -format {{default hdf5_petsc}separate output}
      args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
      args: -orig_dm_plex_interpolate {{0 1}separate output}
    test:
      suffix: 2a
      nsize: {{1 2 4 8}separate output}
      args: -format {{hdf5_xdmf hdf5_viz}separate output}

  test:
    suffix: 3
    requires: exodusii
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

  # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
  testset:
    suffix: 4
    requires: !complex
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
    args: -distribute 0 -second_write_read -compare
    test:
      suffix: hdf5_petsc
      nsize: {{1 2}}
      args: -format hdf5_petsc -compare_labels
      args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
    test:
      suffix: hdf5_xdmf
      nsize: {{1 3 8}}
      args: -format hdf5_xdmf

  # Use low level functions, DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
  # TODO: The output is very long, so only the 1.0.0 version is kept. This test should be redesigned or removed.
  test:
    suffix: 5
    requires: exodusii
    nsize: 2
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -orig_dm_plex_interpolate 0 -orig_dm_distribute 0
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -use_low_level_functions {{0 1}}
    args: -dm_plex_view_hdf5_storage_version 1.0.0

  testset:
    suffix: 6
    requires: hdf5 !complex datafilespath
    nsize: {{1 3}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -orig_dm_distribute 0
    args: -format hdf5_petsc -second_write_read -compare -compare_labels
    args: -orig_dm_plex_interpolate {{0 1}} -distribute {{0 1}}
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

  testset:
    # the same data and settings as dm_impls_plex_tests-ex18_9%
    suffix: 9
    requires: hdf5 !complex datafilespath
    nsize: {{1 2 4}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -orig_dm_distribute 0
    args: -format {{hdf5_petsc hdf5_xdmf}} -second_write_read -compare
    args: -dm_plex_view_hdf5_storage_version 3.0.0
    test:
      suffix: hdf5_seqload
      args: -distribute
      args: -orig_dm_plex_interpolate {{0 1}}
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5_seqload_metis
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -orig_dm_plex_interpolate 1
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5
      args: -orig_dm_plex_interpolate 1
    test:
      suffix: hdf5_repart
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -orig_dm_plex_interpolate 1
    test:
      TODO: Parallel partitioning of uninterpolated meshes not supported
      suffix: hdf5_repart_ppu
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -orig_dm_plex_interpolate 0

  # reproduce PetscSFView() crash - fixed, left as regression test
  test:
    suffix: new_dm_view
    requires: exodusii
    nsize: 2
    args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

  # test backward compatibility with petsc_hdf5 format version 1.0.0, serial idempotence
  testset:
    suffix: 10-v3.16.0-v1.0.0
    requires: hdf5 !complex datafilespath
    args: -dm_plex_check_all -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}} -use_low_level_functions {{0 1}}
    test:
      suffix: a
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
    test:
      suffix: e
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5

  # test permuted sections with petsc_hdf5 format version 1.0.0
  testset:
    suffix: 11
    requires: hdf5 triangle
    args: -field
    args: -dm_plex_check_all -compare -compare_labels
    args: -orig_dm_plex_box_faces 3,3 -dm_plex_view_hdf5_storage_version 1.0.0
    args: -orig_dm_reorder_section -orig_dm_reorder_section_type reverse

    test:
      suffix: serial
    test:
      suffix: serial_no_perm
      args: -orig_dm_ignore_perm_output

TEST*/