xref: /petsc/src/dm/impls/plex/tutorials/ex5.c (revision c9ad657ee665028d2ed9861512a1c779b61e2acc)
17d26aeb3SVaclav Hapla static char help[] = "Demonstrate HDF5/XDMF load-save-reload cycle\n\n";
27d26aeb3SVaclav Hapla 
37d26aeb3SVaclav Hapla #include <petscdmplex.h>
47d26aeb3SVaclav Hapla #include <petscviewerhdf5.h>
57d26aeb3SVaclav Hapla #define EX "ex5.c"
67d26aeb3SVaclav Hapla 
/* Application context: command-line configurable parameters driving the
   HDF5/XDMF save-load-reload cycle (filled in by ProcessOptions()). */
typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         redistribute;                /* Redistribute the mesh */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times do the cycle */
} AppCtx;
167d26aeb3SVaclav Hapla 
177d26aeb3SVaclav Hapla static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
187d26aeb3SVaclav Hapla {
197d26aeb3SVaclav Hapla   PetscBool      flg;
207d26aeb3SVaclav Hapla   PetscErrorCode ierr;
217d26aeb3SVaclav Hapla 
227d26aeb3SVaclav Hapla   PetscFunctionBeginUser;
237d26aeb3SVaclav Hapla   options->infile[0]     = '\0';
247d26aeb3SVaclav Hapla   options->outfile[0]    = '\0';
257d26aeb3SVaclav Hapla   options->informat      = PETSC_VIEWER_HDF5_XDMF;
267d26aeb3SVaclav Hapla   options->outformat     = PETSC_VIEWER_HDF5_XDMF;
277d26aeb3SVaclav Hapla   options->redistribute  = PETSC_TRUE;
28efa12513Sksagiyam   options->heterogeneous = PETSC_FALSE;
297d26aeb3SVaclav Hapla   options->ntimes        = 2;
307d26aeb3SVaclav Hapla   ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
317d26aeb3SVaclav Hapla   ierr = PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg);CHKERRQ(ierr);
327d26aeb3SVaclav Hapla   if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
337d26aeb3SVaclav Hapla   ierr = PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg);CHKERRQ(ierr);
347d26aeb3SVaclav Hapla   if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
357d26aeb3SVaclav Hapla   ierr = PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum*)&options->informat, NULL);CHKERRQ(ierr);
367d26aeb3SVaclav Hapla   ierr = PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum*)&options->outformat, NULL);CHKERRQ(ierr);
377d26aeb3SVaclav Hapla   ierr = PetscOptionsBool("-redistribute", "Redistribute the mesh", EX, options->redistribute, &options->redistribute, NULL);CHKERRQ(ierr);
38efa12513Sksagiyam   ierr = PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL);CHKERRQ(ierr);
397d26aeb3SVaclav Hapla   ierr = PetscOptionsInt("-ntimes", "How many times do the cycle", EX, options->ntimes, &options->ntimes, NULL);CHKERRQ(ierr);
401e1ea65dSPierre Jolivet   ierr = PetscOptionsEnd();CHKERRQ(ierr);
417d26aeb3SVaclav Hapla   PetscFunctionReturn(0);
427d26aeb3SVaclav Hapla };
437d26aeb3SVaclav Hapla 
447d26aeb3SVaclav Hapla //TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
457d26aeb3SVaclav Hapla int main(int argc, char **argv)
467d26aeb3SVaclav Hapla {
477d26aeb3SVaclav Hapla   AppCtx            user;
48efa12513Sksagiyam   MPI_Comm          comm;
49efa12513Sksagiyam   PetscMPIInt       gsize, grank, mycolor;
507d26aeb3SVaclav Hapla   PetscInt          i;
517d26aeb3SVaclav Hapla   PetscBool         flg;
527d26aeb3SVaclav Hapla   PetscErrorCode    ierr;
53*c9ad657eSksagiyam   const char        exampleDMPlexName[] = "DMPlex Object";
54efa12513Sksagiyam   const char        *infilename;
55efa12513Sksagiyam   PetscViewerFormat informat;
567d26aeb3SVaclav Hapla 
577d26aeb3SVaclav Hapla   ierr = PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
58efa12513Sksagiyam   ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
5955b25c41SPierre Jolivet   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&gsize);CHKERRMPI(ierr);
6055b25c41SPierre Jolivet   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&grank);CHKERRMPI(ierr);
617d26aeb3SVaclav Hapla 
627d26aeb3SVaclav Hapla   for (i=0; i<user.ntimes; i++) {
63efa12513Sksagiyam     if (i==0) {
64efa12513Sksagiyam       /* Use infile/informat for the initial load */
65efa12513Sksagiyam       infilename = user.infile;
66efa12513Sksagiyam       informat   = user.informat;
67efa12513Sksagiyam     } else {
68efa12513Sksagiyam       /* Use outfile/outformat for all I/O except the very initial load */
69efa12513Sksagiyam       infilename = user.outfile;
70efa12513Sksagiyam       informat   = user.outformat;
71efa12513Sksagiyam     }
72efa12513Sksagiyam 
73efa12513Sksagiyam     if (user.heterogeneous) {
74efa12513Sksagiyam       mycolor = (PetscMPIInt)(grank > user.ntimes-i);
75efa12513Sksagiyam     } else {
76efa12513Sksagiyam       mycolor = (PetscMPIInt)0;
77efa12513Sksagiyam       /* comm = PETSC_COMM_WORLD; */
78efa12513Sksagiyam     }
7955b25c41SPierre Jolivet     ierr = MPI_Comm_split(PETSC_COMM_WORLD,mycolor,grank,&comm);CHKERRMPI(ierr);
80efa12513Sksagiyam 
81efa12513Sksagiyam     if (mycolor == 0) {
82efa12513Sksagiyam       /* Load/Save only on processes with mycolor == 0 */
837d26aeb3SVaclav Hapla       DM                dm;
847d26aeb3SVaclav Hapla       PetscPartitioner  part;
857d26aeb3SVaclav Hapla       PetscViewer       v;
867d26aeb3SVaclav Hapla 
877d26aeb3SVaclav Hapla       ierr = PetscPrintf(comm, "Begin cycle %D\n",i);CHKERRQ(ierr);
887d26aeb3SVaclav Hapla 
897d26aeb3SVaclav Hapla       /* Load data from XDMF into dm in parallel */
907d26aeb3SVaclav Hapla       /* We could also use
91cd7e8a5eSksagiyam           ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, "ex5_plex", PETSC_TRUE, &dm);CHKERRQ(ierr);
927d26aeb3SVaclav Hapla         This currently support a few more formats than DMLoad().
937d26aeb3SVaclav Hapla       */
94efa12513Sksagiyam       ierr = PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v);CHKERRQ(ierr);
95efa12513Sksagiyam       ierr = PetscViewerPushFormat(v, informat);CHKERRQ(ierr);
967d26aeb3SVaclav Hapla       ierr = DMCreate(comm, &dm);CHKERRQ(ierr);
977d26aeb3SVaclav Hapla       ierr = DMSetType(dm, DMPLEX);CHKERRQ(ierr);
98*c9ad657eSksagiyam       ierr = PetscObjectSetName((PetscObject) dm, exampleDMPlexName);CHKERRQ(ierr);
997d26aeb3SVaclav Hapla       ierr = DMSetOptionsPrefix(dm,"loaded_");CHKERRQ(ierr);
1007d26aeb3SVaclav Hapla       ierr = DMLoad(dm, v);CHKERRQ(ierr);
1017d26aeb3SVaclav Hapla       ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
1027d26aeb3SVaclav Hapla       ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
1037d26aeb3SVaclav Hapla       ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
1047d26aeb3SVaclav Hapla       ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
1057d26aeb3SVaclav Hapla 
1067d26aeb3SVaclav Hapla       /* We just test/demonstrate DM is indeed distributed - unneeded in the application code */
1077d26aeb3SVaclav Hapla       ierr = DMPlexIsDistributed(dm, &flg);CHKERRQ(ierr);
1081e1ea65dSPierre Jolivet       ierr = PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]);CHKERRQ(ierr);
1097d26aeb3SVaclav Hapla 
1107d26aeb3SVaclav Hapla       /* Interpolate */
1117d26aeb3SVaclav Hapla       //TODO we want to be able to do this from options in DMSetFromOptions() probably
1127d26aeb3SVaclav Hapla       //TODO we want to be able to do this in-place
1137d26aeb3SVaclav Hapla       {
1147d26aeb3SVaclav Hapla         DM idm;
1157d26aeb3SVaclav Hapla 
1167d26aeb3SVaclav Hapla         ierr = DMPlexInterpolate(dm, &idm);CHKERRQ(ierr);
1177d26aeb3SVaclav Hapla         ierr = DMDestroy(&dm);CHKERRQ(ierr);
1187d26aeb3SVaclav Hapla         dm   = idm;
1197d26aeb3SVaclav Hapla           ierr = DMSetOptionsPrefix(dm,"interpolated_");CHKERRQ(ierr);
1207d26aeb3SVaclav Hapla           ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
1217d26aeb3SVaclav Hapla           ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
1227d26aeb3SVaclav Hapla       }
1237d26aeb3SVaclav Hapla 
1247d26aeb3SVaclav Hapla       /* Redistribute */
1257d26aeb3SVaclav Hapla       //TODO we want to be able to do this from options in DMSetFromOptions() probably
1267d26aeb3SVaclav Hapla       if (user.redistribute) {
1277d26aeb3SVaclav Hapla         DM dmdist;
1287d26aeb3SVaclav Hapla 
1297d26aeb3SVaclav Hapla         ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
1307d26aeb3SVaclav Hapla         ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
1317d26aeb3SVaclav Hapla         ierr = DMPlexDistribute(dm, 0, NULL, &dmdist);CHKERRQ(ierr);
1327d26aeb3SVaclav Hapla         //TODO we want to be able to do this in-place
1337d26aeb3SVaclav Hapla         if (dmdist) {
1347d26aeb3SVaclav Hapla           ierr = DMDestroy(&dm);CHKERRQ(ierr);
1357d26aeb3SVaclav Hapla           dm   = dmdist;
1367d26aeb3SVaclav Hapla           ierr = DMSetOptionsPrefix(dm,"redistributed_");CHKERRQ(ierr);
1377d26aeb3SVaclav Hapla           ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
1387d26aeb3SVaclav Hapla           ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
1397d26aeb3SVaclav Hapla         }
1407d26aeb3SVaclav Hapla       }
1417d26aeb3SVaclav Hapla 
1427d26aeb3SVaclav Hapla       /* Save redistributed dm to XDMF in parallel and destroy it */
1437d26aeb3SVaclav Hapla       ierr = PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v);CHKERRQ(ierr);
144efa12513Sksagiyam       ierr = PetscViewerPushFormat(v, user.outformat);CHKERRQ(ierr);
145*c9ad657eSksagiyam       ierr = PetscObjectSetName((PetscObject) dm, exampleDMPlexName);CHKERRQ(ierr);
1467d26aeb3SVaclav Hapla       ierr = DMView(dm, v);CHKERRQ(ierr);
1477d26aeb3SVaclav Hapla       ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
1487d26aeb3SVaclav Hapla       ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
1497d26aeb3SVaclav Hapla       ierr = DMDestroy(&dm);CHKERRQ(ierr);
1507d26aeb3SVaclav Hapla 
1517d26aeb3SVaclav Hapla       ierr = PetscPrintf(comm, "End   cycle %D\n--------\n",i);CHKERRQ(ierr);
1527d26aeb3SVaclav Hapla     }
15355b25c41SPierre Jolivet     ierr = MPI_Comm_free(&comm);CHKERRMPI(ierr);
15455b25c41SPierre Jolivet     ierr = MPI_Barrier(PETSC_COMM_WORLD);CHKERRMPI(ierr);
155efa12513Sksagiyam   }
1567d26aeb3SVaclav Hapla 
1577d26aeb3SVaclav Hapla   /* Final clean-up */
1587d26aeb3SVaclav Hapla   ierr = PetscFinalize();
1597d26aeb3SVaclav Hapla   return ierr;
1607d26aeb3SVaclav Hapla }
1617d26aeb3SVaclav Hapla 
1627d26aeb3SVaclav Hapla /*TEST
1637d26aeb3SVaclav Hapla   build:
1647d26aeb3SVaclav Hapla     requires: hdf5
1657d26aeb3SVaclav Hapla   testset:
1667d26aeb3SVaclav Hapla     suffix: 0
1677d26aeb3SVaclav Hapla     requires: !complex
168efa12513Sksagiyam     nsize: 4
1697d26aeb3SVaclav Hapla     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
1707d26aeb3SVaclav Hapla     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
1717d26aeb3SVaclav Hapla     args: -ntimes 3
172efa12513Sksagiyam     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
1737d26aeb3SVaclav Hapla     test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
1757d26aeb3SVaclav Hapla       suffix: simple
1767d26aeb3SVaclav Hapla       args: -petscpartitioner_type simple
1777d26aeb3SVaclav Hapla     test:
1787d26aeb3SVaclav Hapla       suffix: parmetis
1797d26aeb3SVaclav Hapla       requires: parmetis
1807d26aeb3SVaclav Hapla       args: -petscpartitioner_type parmetis
1817d26aeb3SVaclav Hapla     test:
1827d26aeb3SVaclav Hapla       suffix: ptscotch
1837d26aeb3SVaclav Hapla       requires: ptscotch
1847d26aeb3SVaclav Hapla       args: -petscpartitioner_type ptscotch
185efa12513Sksagiyam 
186efa12513Sksagiyam   testset:
187efa12513Sksagiyam     suffix: 1
188efa12513Sksagiyam     requires: !complex
189efa12513Sksagiyam     nsize: 4
190efa12513Sksagiyam     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
191efa12513Sksagiyam     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
192efa12513Sksagiyam     args: -ntimes 3
193efa12513Sksagiyam     args: -heterogeneous True
194efa12513Sksagiyam     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
195efa12513Sksagiyam     test:
196efa12513Sksagiyam       suffix: simple
197efa12513Sksagiyam       args: -petscpartitioner_type simple
198efa12513Sksagiyam     test:
199efa12513Sksagiyam       suffix: parmetis
200efa12513Sksagiyam       requires: parmetis
201efa12513Sksagiyam       args: -petscpartitioner_type parmetis
202efa12513Sksagiyam     test:
203efa12513Sksagiyam       suffix: ptscotch
204efa12513Sksagiyam       requires: ptscotch
205efa12513Sksagiyam       args: -petscpartitioner_type ptscotch
206efa12513Sksagiyam 
2077d26aeb3SVaclav Hapla TEST*/
208