static char help[] = "Demonstrate HDF5/XDMF load-save-reload cycle\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex5.c"

typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         redistribute;                /* Redistribute the mesh */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times to run the cycle */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->informat      = PETSC_VIEWER_HDF5_XDMF;
  options->outformat     = PETSC_VIEWER_HDF5_XDMF;
  options->redistribute  = PETSC_TRUE;
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");PetscCall(ierr);
  PetscCall(PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscCall(PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscCall(PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum *)&options->informat, NULL));
  PetscCall(PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum *)&options->outformat, NULL));
  PetscCall(PetscOptionsBool("-redistribute", "Redistribute the mesh", EX, options->redistribute, &options->redistribute, NULL));
  PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
  PetscCall(PetscOptionsInt("-ntimes", "How many times to run the cycle", EX, options->ntimes, &options->ntimes, NULL));
  ierr = PetscOptionsEnd();PetscCall(ierr);
  PetscFunctionReturn(0);
}

//TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  const char        exampleDMPlexName[] = "DMPlex Object";
  const char       *infilename;
  PetscViewerFormat informat;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  for (i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      mycolor = (PetscMPIInt)0;
      /* comm = PETSC_COMM_WORLD; */
    }
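    /* Illustrative note on -heterogeneous: the group of ranks taking part in I/O shrinks each cycle.
       For example, -ntimes 3 on 4 ranks gives mycolor = (grank > 3-i), so cycle 0 runs on all 4 ranks,
       cycle 1 reloads on 3 ranks, and cycle 2 on 2 ranks - a mesh saved on N ranks is reloaded on M < N. */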
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM               dm;
      PetscPartitioner part;
      PetscViewer      v;

      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT "\n", i));

      /* Load data from XDMF into dm in parallel */
      /* We could also use
           PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, "ex5_plex", PETSC_TRUE, &dm));
         This currently supports a few more formats than DMLoad().
      */
      PetscCall(PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v));
      PetscCall(PetscViewerPushFormat(v, informat));
      PetscCall(DMCreate(comm, &dm));
      PetscCall(DMSetType(dm, DMPLEX));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMLoad(dm, v));
      PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));

      /* We just test/demonstrate that the DM is indeed distributed - not needed in application code */
      PetscCall(DMPlexIsDistributed(dm, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      /* Interpolate */
      //TODO we want to be able to do this from options in DMSetFromOptions() probably
      //TODO we want to be able to do this in-place
      {
        DM idm;

        PetscCall(DMPlexInterpolate(dm, &idm));
        PetscCall(DMDestroy(&dm));
        dm = idm;
        PetscCall(DMSetOptionsPrefix(dm, "interpolated_"));
        PetscCall(DMSetFromOptions(dm));
        PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      }

      /* Redistribute */
      //TODO we want to be able to do this from options in DMSetFromOptions() probably
      if (user.redistribute) {
        DM dmdist;

        PetscCall(DMPlexGetPartitioner(dm, &part));
        PetscCall(PetscPartitionerSetFromOptions(part));
        PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
        //TODO we want to be able to do this in-place
        if (dmdist) {
          PetscCall(DMDestroy(&dm));
          dm = dmdist;
          PetscCall(DMSetOptionsPrefix(dm, "redistributed_"));
          PetscCall(DMSetFromOptions(dm));
          PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
        }
      }

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscCall(PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v));
      PetscCall(PetscViewerPushFormat(v, user.outformat));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMView(dm, v));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));
      PetscCall(DMDestroy(&dm));

      PetscCall(PetscPrintf(comm, "End cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  /* Final clean-up */
  PetscCall(PetscFinalize());
  return 0;
}
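
/*
  Example invocation, mirroring the first test below (the mesh path and the dump filename
  ex5_dump.h5 are illustrative; $PETSC_DIR stands in for the test harness's ${wPETSC_DIR},
  and any XDMF/HDF5 mesh readable by the chosen -informat works):

    mpiexec -n 4 ./ex5 \
      -infile $PETSC_DIR/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf \
      -outfile ex5_dump.h5 -outformat hdf5_xdmf \
      -ntimes 3 -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
*/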

/*TEST
  build:
    requires: hdf5
  testset:
    suffix: 0
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: 1
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3
    args: -heterogeneous True
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/