static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#include <petscsf.h>

typedef struct {
  PetscBool         compare;                      /* Compare the meshes using DMPlexEqual() */
  PetscBool         compare_labels;               /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool         distribute;                   /* Distribute the mesh */
  PetscBool         interpolate;                  /* Generate intermediate mesh elements */
  char              filename[PETSC_MAX_PATH_LEN]; /* Mesh filename */
  char              meshname[PETSC_MAX_PATH_LEN]; /* Mesh name */
  PetscViewerFormat format;                       /* Format to write and read */
  PetscBool         second_write_read;            /* Write and read for the 2nd time */
  PetscBool         use_low_level_functions;      /* Use low level functions for viewing and loading */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  options->compare                 = PETSC_FALSE;
  options->compare_labels          = PETSC_FALSE;
  options->distribute              = PETSC_TRUE;
  options->interpolate             = PETSC_FALSE;
  options->filename[0]             = '\0';
  options->meshname[0]             = '\0';
  options->format                  = PETSC_VIEWER_DEFAULT;
  options->second_write_read       = PETSC_FALSE;
  options->use_low_level_functions = PETSC_FALSE;

  ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex55.c", options->interpolate, &options->interpolate, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsString("-filename", "The mesh file", "ex55.c", options->filename, options->filename, sizeof(options->filename), NULL);CHKERRQ(ierr);
  ierr = PetscOptionsString("-meshname", "The mesh name", "ex55.c", options->meshname, options->meshname, sizeof(options->meshname), NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum*)&options->format, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
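/* Write the given DM to an HDF5 file under the name "Mesh", then load it back
   into a new DM named "Mesh_new" carrying the options prefix `prefix`.
   If use_low_level_functions is set, the low level DMPlexTopologyView()/Load(),
   DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load() are used
   instead of DMView()/DMLoad(). */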
static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, const char filename[], const char prefix[], AppCtx user, DM *dm_new)
{
  DM             dmnew;
  const char     savedName[]  = "Mesh";
  const char     loadedName[] = "Mesh_new";
  PetscViewer    v;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), filename, FILE_MODE_WRITE, &v);CHKERRQ(ierr);
  ierr = PetscViewerPushFormat(v, user.format);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) dm, savedName);CHKERRQ(ierr);
  if (user.use_low_level_functions) {
    ierr = DMPlexTopologyView(dm, v);CHKERRQ(ierr);
    ierr = DMPlexCoordinatesView(dm, v);CHKERRQ(ierr);
    ierr = DMPlexLabelsView(dm, v);CHKERRQ(ierr);
  } else {
    ierr = DMView(dm, v);CHKERRQ(ierr);
  }

  ierr = PetscViewerFileSetMode(v, FILE_MODE_READ);CHKERRQ(ierr);
  ierr = DMCreate(PETSC_COMM_WORLD, &dmnew);CHKERRQ(ierr);
  ierr = DMSetType(dmnew, DMPLEX);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) dmnew, savedName);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(dmnew, prefix);CHKERRQ(ierr);
  if (user.use_low_level_functions) {
    PetscSF sfXC;

    ierr = DMPlexTopologyLoad(dmnew, v, &sfXC);CHKERRQ(ierr);
    ierr = DMPlexCoordinatesLoad(dmnew, v, sfXC);CHKERRQ(ierr);
    ierr = DMPlexLabelsLoad(dmnew, v, sfXC);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfXC);CHKERRQ(ierr);
  } else {
    ierr = DMLoad(dmnew, v);CHKERRQ(ierr);
  }
  ierr = PetscObjectSetName((PetscObject) dmnew, loadedName);CHKERRQ(ierr);

  ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
  *dm_new = dmnew;
  PetscFunctionReturn(0);
}
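/* main(): load a mesh from -filename, optionally distribute it, write it to
   dmdist.h5 and read it back (optionally a second time), and, if requested,
   compare the original and reloaded meshes and their labels. */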
int main(int argc, char **argv)
{
  DM               dm, dmnew;
  PetscPartitioner part;
  AppCtx           user;
  PetscBool        flg;
  PetscErrorCode   ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
  ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, user.meshname, user.interpolate, &dm);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(dm, "orig_");CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);

  if (user.distribute) {
    DM dmdist;

    ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
    ierr = DMPlexDistribute(dm, 0, NULL, &dmdist);CHKERRQ(ierr);
    if (dmdist) {
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = dmdist;
    }
  }

  ierr = DMSetOptionsPrefix(dm, NULL);CHKERRQ(ierr);
  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);

  ierr = DMPlexWriteAndReadHDF5(dm, "dmdist.h5", "new_", user, &dmnew);CHKERRQ(ierr);

  if (user.second_write_read) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmnew;
    ierr = DMPlexWriteAndReadHDF5(dm, "dmdist.h5", "new_", user, &dmnew);CHKERRQ(ierr);
  }

  ierr = DMViewFromOptions(dmnew, NULL, "-dm_view");CHKERRQ(ierr);
  /* TODO: Is it still true? */
  /* The NATIVE format for coordinate viewing is killing parallel output, since we have a local vector. Map it to global, and it will work. */

  /* This currently makes sense only for sequential meshes. */
  if (user.compare) {
    ierr = DMPlexEqual(dmnew, dm, &flg);CHKERRQ(ierr);
    PetscCheckFalse(!flg, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
    ierr = PetscPrintf(PETSC_COMM_WORLD, "DMs equal\n");CHKERRQ(ierr);
  }
  if (user.compare_labels) {
    ierr = DMCompareLabels(dmnew, dm, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD, "DMLabels equal\n");CHKERRQ(ierr);
  }

  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = DMDestroy(&dmnew);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
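/* Example invocation (illustrative only; assumes a PETSc build with HDF5 and
   ExodusII and the standard datafiles, cf. the tests below):
     ./ex55 -filename ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
*/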
/*TEST
  build:
    requires: hdf5
  # Idempotence of saving/loading
  # Have to replace Exodus file, which is creating uninterpolated edges
  test:
    suffix: 0
    TODO: broken
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -compare
  test:
    suffix: 1
    TODO: broken
    requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -petscpartitioner_type parmetis
    args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail
  testset:
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -petscpartitioner_type simple
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    test:
      suffix: 2
      nsize: {{1 2 4 8}separate output}
      args: -format {{default hdf5_petsc}separate output}
      args: -interpolate {{0 1}separate output}
    test:
      suffix: 2a
      nsize: {{1 2 4 8}separate output}
      args: -format {{hdf5_xdmf hdf5_viz}separate output}
  test:
    suffix: 3
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels

  # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
  testset:
    suffix: 4
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
    args: -distribute 0 -second_write_read -compare
    test:
      suffix: hdf5_petsc
      nsize: {{1 2}}
      args: -format hdf5_petsc -compare_labels
    test:
      suffix: hdf5_xdmf
      nsize: {{1 3 8}}
      args: -format hdf5_xdmf

  # Use low level functions, DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
  # Output must be the same as ex55_2_nsize-2_format-hdf5_petsc_interpolate-0.out
  test:
    suffix: 5
    requires: exodusii
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -petscpartitioner_type simple
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -use_low_level_functions

  testset:
    suffix: 6
    requires: hdf5 !complex datafilespath
    nsize: {{1 3}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -format hdf5_petsc -second_write_read -compare -compare_labels
    args: -interpolate {{0 1}} -distribute {{0 1}} -petscpartitioner_type simple

  testset:
    # the same data and settings as dm_impls_plex_tests-ex18_9
    suffix: 9
    requires: hdf5 !complex datafilespath
    nsize: {{1 2 4}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -format hdf5_xdmf -second_write_read -compare
    test:
      suffix: hdf5_seqload
      args: -distribute -petscpartitioner_type simple
      args: -interpolate {{0 1}}
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5_seqload_metis
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 1
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5
      args: -interpolate 1 -petscpartitioner_type simple
    test:
      suffix: hdf5_repart
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 1
    test:
      TODO: Parallel partitioning of uninterpolated meshes not supported
      suffix: hdf5_repart_ppu
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 0

  # reproduce PetscSFView() crash - fixed, left as regression test
  test:
    suffix: new_dm_view
    requires: exodusii
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail

  # test backward compatibility of petsc_hdf5 format
  testset:
    suffix: 10-v3.16.0-v1.0.0
    requires: hdf5 !complex datafilespath
    args: -dm_plex_check_all -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0}} -use_low_level_functions {{0 1}}
    test:
      suffix: a
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
    test:
      suffix: e
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5

TEST*/