static char help[] = "Demonstrate HDF5 parallel load-save-reload cycle\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex5.c"

typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times to run the cycle */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool flg;

  PetscFunctionBeginUser;
  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->informat      = PETSC_VIEWER_HDF5_XDMF;
  options->outformat     = PETSC_VIEWER_HDF5_XDMF;
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscCall(PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscCall(PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum *)&options->informat, NULL));
  PetscCall(PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum *)&options->outformat, NULL));
  PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
  PetscCall(PetscOptionsInt("-ntimes", "How many times to run the cycle", EX, options->ntimes, &options->ntimes, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
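/*
  Illustration (not executed; a worked example assuming nsize 4 and -ntimes 3 -heterogeneous,
  as in testset 1 below) of the save-on-N / load-on-M communicator splitting done in main():
    cycle i=0: mycolor = (grank > 3) -> ranks 0-3 active (all 4)
    cycle i=1: mycolor = (grank > 2) -> ranks 0-2 active, rank 3 idle
    cycle i=2: mycolor = (grank > 1) -> ranks 0-1 active, ranks 2-3 idle
  so the mesh saved by N processes in one cycle is reloaded by M < N processes in the next.
*/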
//TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  const char        exampleDMPlexName[] = "DMPlex Object";
  const char       *infilename;
  PetscViewerFormat informat;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  for (i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      /* Ranks with grank > ntimes-i move to the idle communicator, so the active group shrinks each cycle */
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      mycolor = (PetscMPIInt)0;
    }
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      PetscViewer v;

      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT "\n", i));

      /* Load data from XDMF into dm in parallel */
      /* We could also use
           PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, "ex5_plex", PETSC_TRUE, &dm));
         which currently supports a few more formats than DMLoad().
      */
      PetscCall(PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v));
      PetscCall(PetscViewerPushFormat(v, informat));
      PetscCall(DMCreate(comm, &dm));
      PetscCall(DMSetType(dm, DMPLEX));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMLoad(dm, v));
      PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));

      /* We just test/demonstrate that the DM is indeed distributed - not needed in application code */
      PetscCall(DMPlexIsDistributed(dm, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      /* Interpolate */
      PetscCall(DMSetOptionsPrefix(dm, "interpolated_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Redistribute */
      PetscCall(DMSetOptionsPrefix(dm, "redistributed_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscCall(PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v));
      PetscCall(PetscViewerPushFormat(v, user.outformat));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMView(dm, v));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));
      PetscCall(DMDestroy(&dm));

      PetscCall(PetscPrintf(comm, "End cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  /* Final clean-up */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  build:
    requires: hdf5
  testset:
    suffix: 0
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      # this partitioner should not shuffle anything; it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: 1
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -heterogeneous True
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/
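/*
  A sample manual invocation (a sketch, not part of the test harness above; assumes a PETSc
  build with HDF5, that PETSC_DIR points at an installation containing the datafiles
  directory, and that the executable is named ex5). All options mirror testset 1:

    mpiexec -n 4 ./ex5 -infile ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 \
      -informat hdf5_xdmf -outfile ex5_dump.h5 -outformat hdf5_petsc \
      -ntimes 3 -heterogeneous True \
      -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute \
      -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
*/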