xref: /petsc/src/dm/impls/plex/tutorials/ex5.c (revision efa12513287cff49a2b9648ae83199dcbfaad71a)
/* One-line description printed by PETSc's -help option. */
static char help[] = "Demonstrate HDF5/XDMF load-save-reload cycle\n\n";
27d26aeb3SVaclav Hapla 
37d26aeb3SVaclav Hapla #include <petscdmplex.h>
47d26aeb3SVaclav Hapla #include <petscviewerhdf5.h>
57d26aeb3SVaclav Hapla #define EX "ex5.c"
67d26aeb3SVaclav Hapla 
/* Application context: all command-line options controlling the
   load -> (interpolate/redistribute) -> save cycle driven by main(). */
typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         redistribute;                /* Redistribute the mesh */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times do the cycle */
} AppCtx;
167d26aeb3SVaclav Hapla 
177d26aeb3SVaclav Hapla static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
187d26aeb3SVaclav Hapla {
197d26aeb3SVaclav Hapla   PetscBool      flg;
207d26aeb3SVaclav Hapla   PetscErrorCode ierr;
217d26aeb3SVaclav Hapla 
227d26aeb3SVaclav Hapla   PetscFunctionBeginUser;
237d26aeb3SVaclav Hapla   options->infile[0]     = '\0';
247d26aeb3SVaclav Hapla   options->outfile[0]    = '\0';
257d26aeb3SVaclav Hapla   options->informat      = PETSC_VIEWER_HDF5_XDMF;
267d26aeb3SVaclav Hapla   options->outformat     = PETSC_VIEWER_HDF5_XDMF;
277d26aeb3SVaclav Hapla   options->redistribute  = PETSC_TRUE;
28*efa12513Sksagiyam   options->heterogeneous = PETSC_FALSE;
297d26aeb3SVaclav Hapla   options->ntimes        = 2;
307d26aeb3SVaclav Hapla   ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
317d26aeb3SVaclav Hapla   ierr = PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg);CHKERRQ(ierr);
327d26aeb3SVaclav Hapla   if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
337d26aeb3SVaclav Hapla   ierr = PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg);CHKERRQ(ierr);
347d26aeb3SVaclav Hapla   if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
357d26aeb3SVaclav Hapla   ierr = PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum*)&options->informat, NULL);CHKERRQ(ierr);
367d26aeb3SVaclav Hapla   ierr = PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum*)&options->outformat, NULL);CHKERRQ(ierr);
377d26aeb3SVaclav Hapla   ierr = PetscOptionsBool("-redistribute", "Redistribute the mesh", EX, options->redistribute, &options->redistribute, NULL);CHKERRQ(ierr);
38*efa12513Sksagiyam   ierr = PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL);CHKERRQ(ierr);
397d26aeb3SVaclav Hapla   ierr = PetscOptionsInt("-ntimes", "How many times do the cycle", EX, options->ntimes, &options->ntimes, NULL);CHKERRQ(ierr);
407d26aeb3SVaclav Hapla   ierr = PetscOptionsEnd();
417d26aeb3SVaclav Hapla   PetscFunctionReturn(0);
427d26aeb3SVaclav Hapla };
437d26aeb3SVaclav Hapla 
447d26aeb3SVaclav Hapla //TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
457d26aeb3SVaclav Hapla int main(int argc, char **argv)
467d26aeb3SVaclav Hapla {
477d26aeb3SVaclav Hapla   AppCtx            user;
48*efa12513Sksagiyam   MPI_Comm          comm;
49*efa12513Sksagiyam   PetscMPIInt       gsize, grank, mycolor;
507d26aeb3SVaclav Hapla   PetscInt          i;
517d26aeb3SVaclav Hapla   PetscBool         flg;
527d26aeb3SVaclav Hapla   PetscErrorCode    ierr;
53*efa12513Sksagiyam   const char        *infilename;
54*efa12513Sksagiyam   PetscViewerFormat informat;
557d26aeb3SVaclav Hapla 
567d26aeb3SVaclav Hapla   ierr = PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
57*efa12513Sksagiyam   ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
58*efa12513Sksagiyam   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&gsize);CHKERRQ(ierr);
59*efa12513Sksagiyam   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&grank);CHKERRQ(ierr);
607d26aeb3SVaclav Hapla 
617d26aeb3SVaclav Hapla   for (i=0; i<user.ntimes; i++) {
62*efa12513Sksagiyam     if (i==0) {
63*efa12513Sksagiyam       /* Use infile/informat for the initial load */
64*efa12513Sksagiyam       infilename = user.infile;
65*efa12513Sksagiyam       informat   = user.informat;
66*efa12513Sksagiyam     } else {
67*efa12513Sksagiyam       /* Use outfile/outformat for all I/O except the very initial load */
68*efa12513Sksagiyam       infilename = user.outfile;
69*efa12513Sksagiyam       informat   = user.outformat;
70*efa12513Sksagiyam     }
71*efa12513Sksagiyam 
72*efa12513Sksagiyam     if (user.heterogeneous) {
73*efa12513Sksagiyam       mycolor = (PetscMPIInt)(grank > user.ntimes-i);
74*efa12513Sksagiyam     } else {
75*efa12513Sksagiyam       mycolor = (PetscMPIInt)0;
76*efa12513Sksagiyam       /* comm = PETSC_COMM_WORLD; */
77*efa12513Sksagiyam     }
78*efa12513Sksagiyam     ierr = MPI_Comm_split(PETSC_COMM_WORLD,mycolor,grank,&comm);CHKERRQ(ierr);
79*efa12513Sksagiyam 
80*efa12513Sksagiyam     if (mycolor == 0) {
81*efa12513Sksagiyam       /* Load/Save only on processes with mycolor == 0 */
827d26aeb3SVaclav Hapla       DM                dm;
837d26aeb3SVaclav Hapla       PetscPartitioner  part;
847d26aeb3SVaclav Hapla       PetscViewer       v;
857d26aeb3SVaclav Hapla 
867d26aeb3SVaclav Hapla       ierr = PetscPrintf(comm, "Begin cycle %D\n",i);CHKERRQ(ierr);
877d26aeb3SVaclav Hapla 
887d26aeb3SVaclav Hapla       /* Load data from XDMF into dm in parallel */
897d26aeb3SVaclav Hapla       /* We could also use
907d26aeb3SVaclav Hapla           ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, PETSC_TRUE, &dm);CHKERRQ(ierr);
917d26aeb3SVaclav Hapla         This currently support a few more formats than DMLoad().
927d26aeb3SVaclav Hapla       */
93*efa12513Sksagiyam       ierr = PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v);CHKERRQ(ierr);
94*efa12513Sksagiyam       ierr = PetscViewerPushFormat(v, informat);CHKERRQ(ierr);
957d26aeb3SVaclav Hapla       ierr = DMCreate(comm, &dm);CHKERRQ(ierr);
967d26aeb3SVaclav Hapla       ierr = DMSetType(dm, DMPLEX);CHKERRQ(ierr);
97*efa12513Sksagiyam       ierr = PetscObjectSetName((PetscObject) dm, "DMPlex Object");CHKERRQ(ierr);
987d26aeb3SVaclav Hapla       ierr = DMSetOptionsPrefix(dm,"loaded_");CHKERRQ(ierr);
997d26aeb3SVaclav Hapla       ierr = DMLoad(dm, v);CHKERRQ(ierr);
1007d26aeb3SVaclav Hapla       ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
1017d26aeb3SVaclav Hapla       ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
1027d26aeb3SVaclav Hapla       ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
1037d26aeb3SVaclav Hapla       ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
1047d26aeb3SVaclav Hapla 
1057d26aeb3SVaclav Hapla       /* We just test/demonstrate DM is indeed distributed - unneeded in the application code */
1067d26aeb3SVaclav Hapla       ierr = DMPlexIsDistributed(dm, &flg);CHKERRQ(ierr);
1077d26aeb3SVaclav Hapla       ierr = PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]);
1087d26aeb3SVaclav Hapla 
1097d26aeb3SVaclav Hapla       /* Interpolate */
1107d26aeb3SVaclav Hapla       //TODO we want to be able to do this from options in DMSetFromOptions() probably
1117d26aeb3SVaclav Hapla       //TODO we want to be able to do this in-place
1127d26aeb3SVaclav Hapla       {
1137d26aeb3SVaclav Hapla         DM idm;
1147d26aeb3SVaclav Hapla 
1157d26aeb3SVaclav Hapla         ierr = DMPlexInterpolate(dm, &idm);CHKERRQ(ierr);
1167d26aeb3SVaclav Hapla         ierr = DMDestroy(&dm);CHKERRQ(ierr);
1177d26aeb3SVaclav Hapla         dm   = idm;
1187d26aeb3SVaclav Hapla           ierr = DMSetOptionsPrefix(dm,"interpolated_");CHKERRQ(ierr);
1197d26aeb3SVaclav Hapla           ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
1207d26aeb3SVaclav Hapla           ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
1217d26aeb3SVaclav Hapla       }
1227d26aeb3SVaclav Hapla 
1237d26aeb3SVaclav Hapla       /* Redistribute */
1247d26aeb3SVaclav Hapla       //TODO we want to be able to do this from options in DMSetFromOptions() probably
1257d26aeb3SVaclav Hapla       if (user.redistribute) {
1267d26aeb3SVaclav Hapla         DM dmdist;
1277d26aeb3SVaclav Hapla 
1287d26aeb3SVaclav Hapla         ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
1297d26aeb3SVaclav Hapla         ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
1307d26aeb3SVaclav Hapla         ierr = DMPlexDistribute(dm, 0, NULL, &dmdist);CHKERRQ(ierr);
1317d26aeb3SVaclav Hapla         //TODO we want to be able to do this in-place
1327d26aeb3SVaclav Hapla         if (dmdist) {
1337d26aeb3SVaclav Hapla           ierr = DMDestroy(&dm);CHKERRQ(ierr);
1347d26aeb3SVaclav Hapla           dm   = dmdist;
1357d26aeb3SVaclav Hapla           ierr = DMSetOptionsPrefix(dm,"redistributed_");CHKERRQ(ierr);
1367d26aeb3SVaclav Hapla           ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
1377d26aeb3SVaclav Hapla           ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
1387d26aeb3SVaclav Hapla         }
1397d26aeb3SVaclav Hapla       }
1407d26aeb3SVaclav Hapla 
1417d26aeb3SVaclav Hapla       /* Save redistributed dm to XDMF in parallel and destroy it */
1427d26aeb3SVaclav Hapla       ierr = PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v);CHKERRQ(ierr);
143*efa12513Sksagiyam       ierr = PetscViewerPushFormat(v, user.outformat);CHKERRQ(ierr);
1447d26aeb3SVaclav Hapla       ierr = DMView(dm, v);CHKERRQ(ierr);
1457d26aeb3SVaclav Hapla       ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
1467d26aeb3SVaclav Hapla       ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
1477d26aeb3SVaclav Hapla       ierr = DMDestroy(&dm);CHKERRQ(ierr);
1487d26aeb3SVaclav Hapla 
1497d26aeb3SVaclav Hapla       ierr = PetscPrintf(comm, "End   cycle %D\n--------\n",i);CHKERRQ(ierr);
1507d26aeb3SVaclav Hapla     }
151*efa12513Sksagiyam     ierr = MPI_Comm_free(&comm);CHKERRQ(ierr);
152*efa12513Sksagiyam     ierr = MPI_Barrier(PETSC_COMM_WORLD);CHKERRQ(ierr);
153*efa12513Sksagiyam   }
1547d26aeb3SVaclav Hapla 
1557d26aeb3SVaclav Hapla   /* Final clean-up */
1567d26aeb3SVaclav Hapla   ierr = PetscFinalize();
1577d26aeb3SVaclav Hapla   return ierr;
1587d26aeb3SVaclav Hapla }
1597d26aeb3SVaclav Hapla 
1607d26aeb3SVaclav Hapla /*TEST
1617d26aeb3SVaclav Hapla   build:
1627d26aeb3SVaclav Hapla     requires: hdf5
1637d26aeb3SVaclav Hapla   testset:
1647d26aeb3SVaclav Hapla     suffix: 0
1657d26aeb3SVaclav Hapla     requires: !complex
166*efa12513Sksagiyam     nsize: 4
1677d26aeb3SVaclav Hapla     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
1687d26aeb3SVaclav Hapla     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
1697d26aeb3SVaclav Hapla     args: -ntimes 3
170*efa12513Sksagiyam     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
1717d26aeb3SVaclav Hapla     test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
1737d26aeb3SVaclav Hapla       suffix: simple
1747d26aeb3SVaclav Hapla       args: -petscpartitioner_type simple
1757d26aeb3SVaclav Hapla     test:
1767d26aeb3SVaclav Hapla       suffix: parmetis
1777d26aeb3SVaclav Hapla       requires: parmetis
1787d26aeb3SVaclav Hapla       args: -petscpartitioner_type parmetis
1797d26aeb3SVaclav Hapla     test:
1807d26aeb3SVaclav Hapla       suffix: ptscotch
1817d26aeb3SVaclav Hapla       requires: ptscotch
1827d26aeb3SVaclav Hapla       args: -petscpartitioner_type ptscotch
183*efa12513Sksagiyam 
184*efa12513Sksagiyam   testset:
185*efa12513Sksagiyam     suffix: 1
186*efa12513Sksagiyam     requires: !complex
187*efa12513Sksagiyam     nsize: 4
188*efa12513Sksagiyam     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
189*efa12513Sksagiyam     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
190*efa12513Sksagiyam     args: -ntimes 3
191*efa12513Sksagiyam     args: -heterogeneous True
192*efa12513Sksagiyam     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
193*efa12513Sksagiyam     test:
194*efa12513Sksagiyam       suffix: simple
195*efa12513Sksagiyam       args: -petscpartitioner_type simple
196*efa12513Sksagiyam     test:
197*efa12513Sksagiyam       suffix: parmetis
198*efa12513Sksagiyam       requires: parmetis
199*efa12513Sksagiyam       args: -petscpartitioner_type parmetis
200*efa12513Sksagiyam     test:
201*efa12513Sksagiyam       suffix: ptscotch
202*efa12513Sksagiyam       requires: ptscotch
203*efa12513Sksagiyam       args: -petscpartitioner_type ptscotch
204*efa12513Sksagiyam 
2057d26aeb3SVaclav Hapla TEST*/
206