xref: /petsc/src/dm/impls/plex/tutorials/ex5.c (revision ae8b063e4f7e8ac6d2de6909aa6d539b126b38d9)
1 static char help[] = "Demonstrate HDF5/XDMF load-save-reload cycle\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscviewerhdf5.h>
5 #define EX "ex5.c"
6 
/* Application context: all run-time options for the load-save-reload cycle,
   filled in by ProcessOptions(). */
typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename (HDF5), used only for the very first load */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename; every cycle after the first reloads from here */
  PetscViewerFormat informat;                    /* Input mesh format (e.g. hdf5_xdmf, hdf5_petsc) */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         redistribute;                /* Redistribute the mesh after loading (default: true) */
  PetscBool         heterogeneous;               /* Test save on N ranks / load on M ranks by shrinking the communicator each cycle */
  PetscInt          ntimes;                      /* How many times to run the load-save cycle */
} AppCtx;
16 
17 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
18 {
19   PetscBool      flg;
20   PetscErrorCode ierr;
21 
22   PetscFunctionBeginUser;
23   options->infile[0]     = '\0';
24   options->outfile[0]    = '\0';
25   options->informat      = PETSC_VIEWER_HDF5_XDMF;
26   options->outformat     = PETSC_VIEWER_HDF5_XDMF;
27   options->redistribute  = PETSC_TRUE;
28   options->heterogeneous = PETSC_FALSE;
29   options->ntimes        = 2;
30   ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
31   ierr = PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg);CHKERRQ(ierr);
32   PetscCheckFalse(!flg,comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
33   ierr = PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg);CHKERRQ(ierr);
34   PetscCheckFalse(!flg,comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
35   ierr = PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum*)&options->informat, NULL);CHKERRQ(ierr);
36   ierr = PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum*)&options->outformat, NULL);CHKERRQ(ierr);
37   ierr = PetscOptionsBool("-redistribute", "Redistribute the mesh", EX, options->redistribute, &options->redistribute, NULL);CHKERRQ(ierr);
38   ierr = PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL);CHKERRQ(ierr);
39   ierr = PetscOptionsInt("-ntimes", "How many times do the cycle", EX, options->ntimes, &options->ntimes, NULL);CHKERRQ(ierr);
40   ierr = PetscOptionsEnd();CHKERRQ(ierr);
41   PetscFunctionReturn(0);
42 };
43 
44 //TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
/*
  Driver: repeat a load -> interpolate -> (optionally) redistribute -> save
  cycle -ntimes times. Cycle 0 reads from -infile/-informat; every later
  cycle reads back the file written by the previous one (-outfile/-outformat).
  With -heterogeneous, the communicator used for I/O shrinks each cycle so
  that a mesh saved on N ranks is reloaded on M != N ranks.
*/
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;                           /* sub-communicator doing the I/O this cycle */
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  PetscErrorCode    ierr;
  const char        exampleDMPlexName[] = "DMPlex Object"; /* HDF5 group name; must match between save and load */
  const char        *infilename;
  PetscViewerFormat informat;

  ierr = PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
  ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&gsize);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&grank);CHKERRMPI(ierr);

  for (i=0; i<user.ntimes; i++) {
    if (i==0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      /* Color 1 (excluded from I/O) for the highest ranks; the cutoff
         ntimes-i rises with i, so the active communicator changes size
         from cycle to cycle */
      mycolor = (PetscMPIInt)(grank > user.ntimes-i);
    } else {
      /* Homogeneous: everyone participates every cycle */
      mycolor = (PetscMPIInt)0;
      /* comm = PETSC_COMM_WORLD; */
    }
    ierr = MPI_Comm_split(PETSC_COMM_WORLD,mycolor,grank,&comm);CHKERRMPI(ierr);

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM                dm;
      PetscPartitioner  part;
      PetscViewer       v;

      ierr = PetscPrintf(comm, "Begin cycle %D\n",i);CHKERRQ(ierr);

      /* Load data from XDMF into dm in parallel */
      /* We could also use
          ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, "ex5_plex", PETSC_TRUE, &dm);CHKERRQ(ierr);
        This currently supports a few more formats than DMLoad().
      */
      ierr = PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v);CHKERRQ(ierr);
      ierr = PetscViewerPushFormat(v, informat);CHKERRQ(ierr);
      ierr = DMCreate(comm, &dm);CHKERRQ(ierr);
      ierr = DMSetType(dm, DMPLEX);CHKERRQ(ierr);
      /* Object name selects which HDF5 group DMLoad() reads; matches DMView() below */
      ierr = PetscObjectSetName((PetscObject) dm, exampleDMPlexName);CHKERRQ(ierr);
      ierr = DMSetOptionsPrefix(dm,"loaded_");CHKERRQ(ierr);
      ierr = DMLoad(dm, v);CHKERRQ(ierr);
      /* Keep the as-loaded distribution; redistribution is done explicitly below */
      ierr = DMPlexDistributeSetDefault(dm, PETSC_FALSE);CHKERRQ(ierr);
      ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
      ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
      ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);

      /* We just test/demonstrate DM is indeed distributed - unneeded in the application code */
      ierr = DMPlexIsDistributed(dm, &flg);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]);CHKERRQ(ierr);

      /* Interpolate: build the intermediate (edge/face) mesh entities */
      //TODO we want to be able to do this from options in DMSetFromOptions() probably
      //TODO we want to be able to do this in-place
      {
        DM idm;

        ierr = DMPlexInterpolate(dm, &idm);CHKERRQ(ierr);
        /* interpolated mesh replaces the original; ownership moves to dm */
        ierr = DMDestroy(&dm);CHKERRQ(ierr);
        dm   = idm;
          ierr = DMSetOptionsPrefix(dm,"interpolated_");CHKERRQ(ierr);
          ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
          ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
      }

      /* Redistribute */
      //TODO we want to be able to do this from options in DMSetFromOptions() probably
      if (user.redistribute) {
        DM dmdist;

        ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
        ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
        ierr = DMPlexDistribute(dm, 0, NULL, &dmdist);CHKERRQ(ierr);
        //TODO we want to be able to do this in-place
        /* dmdist is NULL when no redistribution happened (e.g. single rank) */
        if (dmdist) {
          ierr = DMDestroy(&dm);CHKERRQ(ierr);
          dm   = dmdist;
          ierr = DMSetOptionsPrefix(dm,"redistributed_");CHKERRQ(ierr);
          ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
          ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
        }
      }

      /* Save redistributed dm to XDMF in parallel and destroy it */
      ierr = PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v);CHKERRQ(ierr);
      ierr = PetscViewerPushFormat(v, user.outformat);CHKERRQ(ierr);
      /* Re-set the name: the prefix changes above do not affect it, but the
         saved group name must equal the one DMLoad() looks up next cycle */
      ierr = PetscObjectSetName((PetscObject) dm, exampleDMPlexName);CHKERRQ(ierr);
      ierr = DMView(dm, v);CHKERRQ(ierr);
      ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
      ierr = DMDestroy(&dm);CHKERRQ(ierr);

      ierr = PetscPrintf(comm, "End   cycle %D\n--------\n",i);CHKERRQ(ierr);
    }
    /* All ranks (both colors) free the split communicator and sync before
       the next cycle so the output file is complete before it is re-read */
    ierr = MPI_Comm_free(&comm);CHKERRMPI(ierr);
    ierr = MPI_Barrier(PETSC_COMM_WORLD);CHKERRMPI(ierr);
  }

  /* Final clean-up */
  ierr = PetscFinalize();
  return ierr;
}
162 
163 /*TEST
164   build:
165     requires: hdf5
166   testset:
167     suffix: 0
168     requires: !complex
169     nsize: 4
170     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
171     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
172     args: -ntimes 3
173     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
174     test:
      # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
176       suffix: simple
177       args: -petscpartitioner_type simple
178     test:
179       suffix: parmetis
180       requires: parmetis
181       args: -petscpartitioner_type parmetis
182     test:
183       suffix: ptscotch
184       requires: ptscotch
185       args: -petscpartitioner_type ptscotch
186 
187   testset:
188     suffix: 1
189     requires: !complex
190     nsize: 4
191     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
192     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
193     args: -ntimes 3
194     args: -heterogeneous True
195     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
196     test:
197       suffix: simple
198       args: -petscpartitioner_type simple
199     test:
200       suffix: parmetis
201       requires: parmetis
202       args: -petscpartitioner_type parmetis
203     test:
204       suffix: ptscotch
205       requires: ptscotch
206       args: -petscpartitioner_type ptscotch
207 
208 TEST*/
209