static char help[] = "Demonstrate HDF5 parallel load-save-reload cycle\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex5.c"

typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times to run the cycle */
} AppCtx;
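
/* Example invocation (illustrative; it mirrors the test configuration at the
   bottom of this file, and any HDF5/XDMF mesh can stand in for the input file):
     mpiexec -n 4 ./ex5 -infile ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 \
       -informat hdf5_xdmf -outfile ex5_dump.h5 -outformat hdf5_petsc -ntimes 3 -heterogeneous
*/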

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool flg;

  PetscFunctionBeginUser;
  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->informat      = PETSC_VIEWER_HDF5_XDMF;
  options->outformat     = PETSC_VIEWER_HDF5_XDMF;
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscCall(PetscOptionsString("-outfile", "The output mesh file", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscCall(PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum *)&options->informat, NULL));
  PetscCall(PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum *)&options->outformat, NULL));
  PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
  PetscCall(PetscOptionsInt("-ntimes", "How many times to run the cycle", EX, options->ntimes, &options->ntimes, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

// TODO: test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  const char        exampleDMPlexName[] = "DMPlex Object";
  const char       *infilename;
  PetscViewerFormat informat;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  for (i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      mycolor = (PetscMPIInt)0;
    }
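    /* With -heterogeneous, later cycles run on progressively fewer ranks:
       e.g. with 4 ranks and -ntimes 3, cycle 0 keeps every rank (color 0),
       cycle 1 excludes rank 3, and cycle 2 excludes ranks 2 and 3, so a mesh
       saved on N processes is reloaded on M <= N processes. */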
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      PetscViewer v;

      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT "\n", i));

      /* Load data from XDMF into dm in parallel */
      /* We could also use
          PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, "ex5_plex", PETSC_TRUE, &dm));
        which currently supports a few more formats than DMLoad().
      */
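      /* Note: the hdf5_xdmf format stores the mesh in an XDMF-compatible HDF5
         layout, while hdf5_petsc is PETSc's native HDF5 representation; the
         tests below exercise both as output formats. */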
      PetscCall(PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v));
      PetscCall(PetscViewerPushFormat(v, informat));
      PetscCall(DMCreate(comm, &dm));
      PetscCall(DMSetType(dm, DMPLEX));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMLoad(dm, v));
      PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));

      /* Test/demonstrate that the DM is indeed distributed; this is not needed in application code */
      PetscCall(DMPlexIsDistributed(dm, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      /* Interpolate */
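      /* Re-running DMSetFromOptions() under a new prefix lets command-line
         options drive this stage; e.g. the test below passes
         -interpolated_dm_plex_interpolate_pre to interpolate the loaded mesh. */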
      PetscCall(DMSetOptionsPrefix(dm, "interpolated_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Redistribute */
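      /* Same pattern as above: the test below passes -redistributed_dm_distribute
         to redistribute the mesh over the current communicator. */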
      PetscCall(DMSetOptionsPrefix(dm, "redistributed_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscCall(PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v));
      PetscCall(PetscViewerPushFormat(v, user.outformat));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMView(dm, v));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));
      PetscCall(DMDestroy(&dm));

      PetscCall(PetscPrintf(comm, "End   cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  /* Final clean-up */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  build:
    requires: hdf5
  testset:
    suffix: 0
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      # this partitioner should not shuffle anything; it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: 1
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -heterogeneous True
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/