xref: /petsc/src/dm/impls/plex/tutorials/ex5.c (revision d0609ced746bc51b019815ca91d747429db24893)
1ffa8c570SMatthew G. Knepley static char help[] = "Demonstrate HDF5 parallel load-save-reload cycle\n\n";
27d26aeb3SVaclav Hapla 
37d26aeb3SVaclav Hapla #include <petscdmplex.h>
47d26aeb3SVaclav Hapla #include <petscviewerhdf5.h>
57d26aeb3SVaclav Hapla #define EX "ex5.c"
67d26aeb3SVaclav Hapla 
77d26aeb3SVaclav Hapla typedef struct {
87d26aeb3SVaclav Hapla   char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
97d26aeb3SVaclav Hapla   char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
107d26aeb3SVaclav Hapla   PetscViewerFormat informat;                    /* Input mesh format */
117d26aeb3SVaclav Hapla   PetscViewerFormat outformat;                   /* Dump/reload mesh format */
12efa12513Sksagiyam   PetscBool         heterogeneous;               /* Test save on N / load on M */
137d26aeb3SVaclav Hapla   PetscInt          ntimes;                      /* How many times do the cycle */
147d26aeb3SVaclav Hapla } AppCtx;
157d26aeb3SVaclav Hapla 
167d26aeb3SVaclav Hapla static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
177d26aeb3SVaclav Hapla {
187d26aeb3SVaclav Hapla   PetscBool      flg;
197d26aeb3SVaclav Hapla 
207d26aeb3SVaclav Hapla   PetscFunctionBeginUser;
217d26aeb3SVaclav Hapla   options->infile[0]     = '\0';
227d26aeb3SVaclav Hapla   options->outfile[0]    = '\0';
237d26aeb3SVaclav Hapla   options->informat      = PETSC_VIEWER_HDF5_XDMF;
247d26aeb3SVaclav Hapla   options->outformat     = PETSC_VIEWER_HDF5_XDMF;
25efa12513Sksagiyam   options->heterogeneous = PETSC_FALSE;
267d26aeb3SVaclav Hapla   options->ntimes        = 2;
27*d0609cedSBarry Smith   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
289566063dSJacob Faibussowitsch   PetscCall(PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
2928b400f6SJacob Faibussowitsch   PetscCheck(flg,comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
309566063dSJacob Faibussowitsch   PetscCall(PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
3128b400f6SJacob Faibussowitsch   PetscCheck(flg,comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
329566063dSJacob Faibussowitsch   PetscCall(PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum*)&options->informat, NULL));
339566063dSJacob Faibussowitsch   PetscCall(PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum*)&options->outformat, NULL));
349566063dSJacob Faibussowitsch   PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
359566063dSJacob Faibussowitsch   PetscCall(PetscOptionsInt("-ntimes", "How many times do the cycle", EX, options->ntimes, &options->ntimes, NULL));
36*d0609cedSBarry Smith   PetscOptionsEnd();
377d26aeb3SVaclav Hapla   PetscFunctionReturn(0);
387d26aeb3SVaclav Hapla };
397d26aeb3SVaclav Hapla 
407d26aeb3SVaclav Hapla //TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
/*
  Performs user.ntimes HDF5 load-interpolate-redistribute-save cycles on a
  DMPlex mesh. In heterogeneous mode each cycle runs on a shrinking
  subcommunicator to test saving on N ranks and reloading on M ranks.
*/
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;   /* subcommunicator that performs I/O in the current cycle */
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  const char        exampleDMPlexName[] = "DMPlex Object"; /* HDF5 group name; must match between save and load */
  const char        *infilename;
  PetscViewerFormat informat;

  PetscCall(PetscInitialize(&argc, &argv, NULL,help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  for (i=0; i<user.ntimes; i++) {
    if (i==0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      /* Drop one more high-numbered rank each cycle so N-rank save is reloaded on M < N ranks */
      mycolor = (PetscMPIInt) (grank > user.ntimes-i);
    } else {
      /* Homogeneous: every rank participates in every cycle */
      mycolor = (PetscMPIInt) 0;
    }
    /* All ranks must call the split, even those that will sit the cycle out (color 1) */
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      PetscViewer v;

      PetscCall(PetscPrintf(comm, "Begin cycle %D\n", i));

      /* Load data from XDMF into dm in parallel */
      /* We could also use
          PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, "ex5_plex", PETSC_TRUE, &dm));
        This currently supports a few more formats than DMLoad().
      */
      PetscCall(PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v));
      PetscCall(PetscViewerPushFormat(v, informat));
      PetscCall(DMCreate(comm, &dm));
      PetscCall(DMSetType(dm, DMPLEX));
      /* Name selects the HDF5 group written by the previous cycle's DMView() */
      PetscCall(PetscObjectSetName((PetscObject) dm, exampleDMPlexName));
      PetscCall(DMSetOptionsPrefix(dm,"loaded_"));
      PetscCall(DMLoad(dm, v));
      /* Keep the distribution produced by the parallel load; redistribution is tested separately below */
      PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));

      /* We just test/demonstrate DM is indeed distributed - unneeded in the application code */
      PetscCall(DMPlexIsDistributed(dm, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      /* Interpolate (driven by options under the interpolated_ prefix) */
      PetscCall(DMSetOptionsPrefix(dm,"interpolated_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Redistribute (driven by options under the redistributed_ prefix) */
      PetscCall(DMSetOptionsPrefix(dm,"redistributed_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscCall(PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v));
      PetscCall(PetscViewerPushFormat(v, user.outformat));
      PetscCall(PetscObjectSetName((PetscObject) dm, exampleDMPlexName));
      PetscCall(DMView(dm, v));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));
      PetscCall(DMDestroy(&dm));

      PetscCall(PetscPrintf(comm, "End   cycle %D\n--------\n",i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    /* Ensure the file written this cycle is complete before anyone reloads it next cycle */
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  /* Final clean-up */
  PetscCall(PetscFinalize());
  return 0;
}
1337d26aeb3SVaclav Hapla 
1347d26aeb3SVaclav Hapla /*TEST
1357d26aeb3SVaclav Hapla   build:
1367d26aeb3SVaclav Hapla     requires: hdf5
1377d26aeb3SVaclav Hapla   testset:
1387d26aeb3SVaclav Hapla     suffix: 0
1397d26aeb3SVaclav Hapla     requires: !complex
140efa12513Sksagiyam     nsize: 4
1417d26aeb3SVaclav Hapla     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
1427d26aeb3SVaclav Hapla     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
143ffa8c570SMatthew G. Knepley     args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
144efa12513Sksagiyam     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
1457d26aeb3SVaclav Hapla     test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
1477d26aeb3SVaclav Hapla       suffix: simple
1487d26aeb3SVaclav Hapla       args: -petscpartitioner_type simple
1497d26aeb3SVaclav Hapla     test:
1507d26aeb3SVaclav Hapla       suffix: parmetis
1517d26aeb3SVaclav Hapla       requires: parmetis
1527d26aeb3SVaclav Hapla       args: -petscpartitioner_type parmetis
1537d26aeb3SVaclav Hapla     test:
1547d26aeb3SVaclav Hapla       suffix: ptscotch
1557d26aeb3SVaclav Hapla       requires: ptscotch
1567d26aeb3SVaclav Hapla       args: -petscpartitioner_type ptscotch
157efa12513Sksagiyam 
158efa12513Sksagiyam   testset:
159efa12513Sksagiyam     suffix: 1
160efa12513Sksagiyam     requires: !complex
161efa12513Sksagiyam     nsize: 4
162efa12513Sksagiyam     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
163efa12513Sksagiyam     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
164ffa8c570SMatthew G. Knepley     args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
165efa12513Sksagiyam     args: -heterogeneous True
166efa12513Sksagiyam     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
167efa12513Sksagiyam     test:
168efa12513Sksagiyam       suffix: simple
169efa12513Sksagiyam       args: -petscpartitioner_type simple
170efa12513Sksagiyam     test:
171efa12513Sksagiyam       suffix: parmetis
172efa12513Sksagiyam       requires: parmetis
173efa12513Sksagiyam       args: -petscpartitioner_type parmetis
174efa12513Sksagiyam     test:
175efa12513Sksagiyam       suffix: ptscotch
176efa12513Sksagiyam       requires: ptscotch
177efa12513Sksagiyam       args: -petscpartitioner_type ptscotch
178efa12513Sksagiyam 
1797d26aeb3SVaclav Hapla TEST*/
180