xref: /petsc/src/dm/impls/plex/tutorials/ex5.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 static char help[] = "Demonstrate HDF5 parallel load-save-reload cycle\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscviewerhdf5.h>
5 #define EX "ex5.c"
6 
/* Run-time configuration gathered from the command line (see ProcessOptions()) */
typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename (required, -infile) */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename (required, -outfile) */
  PetscViewerFormat informat;                    /* Input mesh format (-informat, default HDF5_XDMF) */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format (-outformat, default HDF5_XDMF) */
  PetscBool         heterogeneous;               /* Test save on N / load on M processes (-heterogeneous) */
  PetscInt          ntimes;                      /* How many times do the cycle (-ntimes, default 2) */
} AppCtx;
15 
16 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) {
17   PetscBool flg;
18 
19   PetscFunctionBeginUser;
20   options->infile[0]     = '\0';
21   options->outfile[0]    = '\0';
22   options->informat      = PETSC_VIEWER_HDF5_XDMF;
23   options->outformat     = PETSC_VIEWER_HDF5_XDMF;
24   options->heterogeneous = PETSC_FALSE;
25   options->ntimes        = 2;
26   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
27   PetscCall(PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
28   PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
29   PetscCall(PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
30   PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
31   PetscCall(PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum *)&options->informat, NULL));
32   PetscCall(PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum *)&options->outformat, NULL));
33   PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL));
34   PetscCall(PetscOptionsInt("-ntimes", "How many times do the cycle", EX, options->ntimes, &options->ntimes, NULL));
35   PetscOptionsEnd();
36   PetscFunctionReturn(0);
37 };
38 
//TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
/* Repeatedly load a DMPlex from HDF5, interpolate/redistribute it via options,
   and dump it back, demonstrating a parallel save-load-reload cycle.
   With -heterogeneous, each cycle runs on a progressively smaller
   sub-communicator to exercise saving on N ranks and loading on M != N. */
int main(int argc, char **argv) {
  AppCtx            user;
  MPI_Comm          comm;                          /* sub-communicator actually doing I/O this cycle */
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  const char        exampleDMPlexName[] = "DMPlex Object"; /* object name = HDF5 group the DM is stored under */
  const char       *infilename;                    /* file read at the start of the current cycle */
  PetscViewerFormat informat;                      /* format of that file */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  for (i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      /* Exclude ranks with grank > ntimes - i (color 1); the participating
         communicator therefore shrinks by one rank per cycle once i exceeds
         ntimes - gsize, so each save/load pair sees different comm sizes */
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      /* Homogeneous run: everybody participates in every cycle */
      mycolor = (PetscMPIInt)0;
    }
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      PetscViewer v;

      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT "\n", i));

      /* Load data from XDMF into dm in parallel */
      /* We could also use
          PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, "ex5_plex", PETSC_TRUE, &dm));
        This currently supports a few more formats than DMLoad().
      */
      PetscCall(PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v));
      PetscCall(PetscViewerPushFormat(v, informat));
      PetscCall(DMCreate(comm, &dm));
      PetscCall(DMSetType(dm, DMPLEX));
      /* The object name must match the name used when the DM was saved */
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMLoad(dm, v));
      /* Keep the reader's native distribution; redistribution is tested separately below */
      PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));

      /* We just test/demonstrate DM is indeed distributed - unneeded in the application code */
      PetscCall(DMPlexIsDistributed(dm, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      /* Interpolate (triggered by options under the "interpolated_" prefix, e.g.
         -interpolated_dm_plex_interpolate_pre; see the TEST section) */
      PetscCall(DMSetOptionsPrefix(dm, "interpolated_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Redistribute (triggered by options under the "redistributed_" prefix,
         e.g. -redistributed_dm_distribute) */
      PetscCall(DMSetOptionsPrefix(dm, "redistributed_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscCall(PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v));
      PetscCall(PetscViewerPushFormat(v, user.outformat));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMView(dm, v));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));
      PetscCall(DMDestroy(&dm));

      PetscCall(PetscPrintf(comm, "End   cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    /* All ranks (including non-participants) synchronize so the dump file is
       complete before the next cycle reads it */
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  /* Final clean-up */
  PetscCall(PetscFinalize());
  return 0;
}
132 
133 /*TEST
134   build:
135     requires: hdf5
136   testset:
137     suffix: 0
138     requires: !complex
139     nsize: 4
140     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
141     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
142     args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
143     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
144     test:
145       # this partitioner should not shuffle anything, it should yield the same partitioning as the XDMF reader - added just for testing
146       suffix: simple
147       args: -petscpartitioner_type simple
148     test:
149       suffix: parmetis
150       requires: parmetis
151       args: -petscpartitioner_type parmetis
152     test:
153       suffix: ptscotch
154       requires: ptscotch
155       args: -petscpartitioner_type ptscotch
156 
157   testset:
158     suffix: 1
159     requires: !complex
160     nsize: 4
161     args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
162     args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
163     args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
164     args: -heterogeneous True
165     args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
166     test:
167       suffix: simple
168       args: -petscpartitioner_type simple
169     test:
170       suffix: parmetis
171       requires: parmetis
172       args: -petscpartitioner_type parmetis
173     test:
174       suffix: ptscotch
175       requires: ptscotch
176       args: -petscpartitioner_type ptscotch
177 
178 TEST*/
179