xref: /petsc/src/tao/tutorials/ex3.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
1 static char help[] = "Reduced formulation of the mother problem of PDE-constrained optimisation.\n";
2 
3 /*F
4   We solve the mother problem
5 
6   min 1/2 || y - y_d ||^2_2 + \alpha/2 || u ||^2_2
7 
8   subject to
9 
10           - \laplace y = u          on \Omega
11                      y = 0          on \Gamma
12 
13   where u is in L^2 and y is in H^1_0.
14 
15   We formulate the reduced problem solely in terms of the control
16   by using the state equation to express y in terms of u, and then
17   apply LMVM/BLMVM to the resulting reduced problem.
18 
19   Mesh independence is achieved by configuring the Riesz map for the control
20   space.
21 
  Example meshes where the Riesz map is crucial can be downloaded from
  http://people.maths.ox.ac.uk/~farrellp/meshes.tar.gz
24 
25   Contributed by: Patrick Farrell <patrick.farrell@maths.ox.ac.uk>
26 
27   Run with e.g.:
28   ./ex3 -laplace_ksp_type cg -laplace_pc_type hypre -mat_lmvm_ksp_type cg -mat_lmvm_pc_type gamg -laplace_ksp_monitor_true_residual -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-9 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
29 
30   and visualise in paraview with ../../../../petsc_gen_xdmf.py solution.h5.
31 
32   Toggle the Riesz map (-use_riesz 0) to see the difference setting the Riesz maps makes.
33 
34   TODO: broken for parallel runs
35 F*/
36 
37 #include <petsc.h>
38 #include <petscfe.h>
39 #include <petscviewerhdf5.h>
40 
/* Application context shared by the reduced objective/gradient callback.
   Built in CreateCtx(), torn down in DestroyCtx(). */
typedef struct {
  DM  dm;                   /* control-space mesh; set in CreateCtx, destroyed by main (not owned here) */
  Mat mass;                 /* mass matrix M over the control space (assembled from mass_kernel) */
  Vec data;                 /* target state y_d, projected from data_kernel */
  Vec state;                /* forward solution y of the Laplace solve */
  Vec tmp1;                 /* work vector */
  Vec tmp2;                 /* work vector */
  Vec adjoint;              /* adjoint solution lambda */
  Mat laplace;              /* stiffness matrix K with Dirichlet rows zeroed to the identity */
  KSP ksp_laplace;          /* solver for K, options prefix "laplace_" */
  PetscInt  num_bc_dofs;    /* number of Dirichlet-boundary dofs */
  PetscInt* bc_indices;     /* global indices of the boundary dofs */
  PetscScalar* bc_values;   /* boundary values (calloc'ed, i.e. all zero) */
  PetscBool use_riesz;      /* toggle Riesz-map configuration of TAO (-use_riesz) */
} AppCtx;
56 
/* Build the computational mesh: either a default DMPLEX configured purely from
   the command line, or a triangle mesh read from a FEniCS-generated HDF5 file
   named with -f.  NOTE(review): the HDF5 path loads flat coordinate/topology
   arrays with VecLoad and rebuilds the cell list by hand, consistent with the
   "TODO: broken for parallel runs" note at the top of the file. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscBool      flg;
  char           filename[2048];

  PetscFunctionBeginUser;
  filename[0] = '\0';
  user->use_riesz = PETSC_TRUE;

  /* -use_riesz toggles the mesh-independence machinery in main();
     flg records whether a mesh file was supplied via -f */
  PetscOptionsBegin(comm, "", "Poisson mother problem options", "DMPLEX");
  PetscCall(PetscOptionsBool("-use_riesz", "Use the Riesz map to achieve mesh independence", "ex3.c", user->use_riesz, &user->use_riesz, NULL));
  PetscCall(PetscOptionsString("-f", "filename to read", "ex3.c", filename, filename, sizeof(filename), &flg));
  PetscOptionsEnd();

  if (!flg) {
    /* No file: create an empty plex; DMSetFromOptions() below configures it */
    PetscCall(DMCreate(comm, dm));
    PetscCall(DMSetType(*dm, DMPLEX));
  } else {
    /* TODO Eliminate this in favor of DMLoad() in new code */
#if defined(PETSC_HAVE_HDF5)
    const PetscInt vertices_per_cell = 3; /* the file stores linear triangles */
    PetscViewer    viewer;
    Vec            coordinates;
    Vec            topology;
    PetscInt       dim = 2, numCells;
    PetscInt       numVertices;
    PetscScalar*   coords;
    PetscScalar*   topo_f;
    PetscInt*      cells;
    PetscInt       j;
    DMLabel        label;

    /* Read in FEniCS HDF5 output */
    PetscCall(PetscViewerHDF5Open(comm, filename, FILE_MODE_READ, &viewer));

    /* create Vecs to read in the data from H5; the block size encodes the
       number of components per entity (dim coords per vertex, 3 vertices per cell) */
    PetscCall(VecCreate(comm, &coordinates));
    PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
    PetscCall(VecSetBlockSize(coordinates, dim));
    PetscCall(VecCreate(comm, &topology));
    PetscCall(PetscObjectSetName((PetscObject)topology, "topology"));
    PetscCall(VecSetBlockSize(topology, vertices_per_cell));

    /* navigate to the group FEniCS writes the mesh into */
    PetscCall(PetscViewerHDF5PushGroup(viewer, "/Mesh/mesh"));

    /* Read the Vecs */
    PetscCall(VecLoad(coordinates, viewer));
    PetscCall(VecLoad(topology, viewer));

    /* recover entity counts from the flat array sizes */
    PetscCall(VecGetSize(topology, &numCells));
    numCells = numCells / vertices_per_cell;
    PetscCall(VecGetSize(coordinates, &numVertices));
    numVertices = numVertices / dim;

    PetscCall(VecGetArray(coordinates, &coords));
    PetscCall(VecGetArray(topology, &topo_f));
    /* the file stores connectivity as floating point; truncate each entry
       back to an integer vertex index */
    PetscCall(PetscMalloc1(numCells*vertices_per_cell, &cells));
    for (j = 0; j < numCells*vertices_per_cell; j++) {
      cells[j] = (PetscInt) topo_f[j];
    }

    /* Now create the DM */
    PetscCall(DMPlexCreateFromCellListPetsc(comm, dim, numCells, numVertices, vertices_per_cell, PETSC_TRUE, cells, dim, coords, dm));
    /* Check for flipped first cell: if its Jacobian determinant is negative,
       reverse its orientation and re-check */
    {
      PetscReal v0[3], J[9], invJ[9], detJ;

      PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
      if (detJ < 0) {
        PetscCall(DMPlexOrientPoint(*dm, 0, -1));
        PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
        PetscCheck(detJ >= 0,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Something is wrong");
      }
    }
    /* propagate a consistent orientation, then mark the whole boundary with
       label value 1 (used by DMAddBoundary in CreateCtx) */
    PetscCall(DMPlexOrient(*dm));
    PetscCall(DMCreateLabel(*dm, "marker"));
    PetscCall(DMGetLabel(*dm, "marker", &label));
    PetscCall(DMPlexMarkBoundaryFaces(*dm, 1, label));
    PetscCall(DMPlexLabelComplete(*dm, label));

    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(VecRestoreArray(coordinates, &coords));
    PetscCall(VecRestoreArray(topology, &topo_f));
    PetscCall(PetscFree(cells));
    PetscCall(VecDestroy(&coordinates));
    PetscCall(VecDestroy(&topology));
#else
    SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Reconfigure PETSc with --download-hdf5");
#endif
  }
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
154 
/* Pointwise Jacobian kernel, g0 slot: the constant 1 integrand that produces
   the mass matrix \int u v dx (wired up via PetscDSSetJacobian in CreateCtx). */
void mass_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux,
           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
           PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[])
{
  g0[0] = 1.0;
}
162 
163 void laplace_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux,
164            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
165            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
166            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
167 {
168   PetscInt d;
169   for (d = 0; d < dim; ++d) g3[d*dim+d] = 1.0;
170 }
171 
172 /* data we seek to match */
173 PetscErrorCode data_kernel(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *y, void *ctx)
174 {
175   *y = 1.0/(2*PETSC_PI*PETSC_PI) * PetscSinReal(PETSC_PI*x[0]) * PetscSinReal(PETSC_PI*x[1]);
176   /* the associated control is sin(pi*x[0])*sin(pi*x[1]) */
177   return 0;
178 }
/* Homogeneous Dirichlet boundary value u = 0 (registered with DMAddBoundary). */
PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
{
  *u = 0.0;
  return 0;
}
184 
185 PetscErrorCode CreateCtx(DM dm, AppCtx* user)
186 {
187 
188   DM             dm_mass;
189   DM             dm_laplace;
190   PetscDS        prob_mass;
191   PetscDS        prob_laplace;
192   PetscFE        fe;
193   DMLabel        label;
194   PetscSection   section;
195   PetscInt       n, k, p, d;
196   PetscInt       dof, off;
197   IS             is;
198   const PetscInt* points;
199   const PetscInt dim = 2;
200   const PetscInt id  = 1;
201   PetscErrorCode (**wtf)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
202 
203   PetscFunctionBeginUser;
204 
205   /* make the data we seek to match */
206   PetscCall(PetscFECreateDefault(PetscObjectComm((PetscObject) dm), dim, 1, PETSC_TRUE, NULL, 4, &fe));
207 
208   PetscCall(DMSetField(dm, 0, NULL, (PetscObject) fe));
209   PetscCall(DMCreateDS(dm));
210   PetscCall(DMCreateGlobalVector(dm, &user->data));
211 
212   /* ugh, this is hideous */
213   /* y_d = interpolate(Expression("sin(x[0]) + .."), V) */
214   PetscCall(PetscMalloc(1 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &wtf));
215   wtf[0] = data_kernel;
216   PetscCall(DMProjectFunction(dm, 0.0, wtf, NULL, INSERT_VALUES, user->data));
217   PetscCall(PetscFree(wtf));
218 
219   /* assemble(inner(u, v)*dx), almost */
220   PetscCall(DMClone(dm, &dm_mass));
221   PetscCall(DMCopyDisc(dm, dm_mass));
222   PetscCall(DMSetNumFields(dm_mass, 1));
223   PetscCall(DMPlexCopyCoordinates(dm, dm_mass)); /* why do I have to do this separately? */
224   PetscCall(DMGetDS(dm_mass, &prob_mass));
225   PetscCall(PetscDSSetJacobian(prob_mass, 0, 0, mass_kernel, NULL, NULL, NULL));
226   PetscCall(PetscDSSetDiscretization(prob_mass, 0, (PetscObject) fe));
227   PetscCall(DMCreateMatrix(dm_mass, &user->mass));
228   PetscCall(DMPlexSNESComputeJacobianFEM(dm_mass, user->data, user->mass, user->mass, NULL));
229   PetscCall(MatSetOption(user->mass, MAT_SYMMETRIC, PETSC_TRUE));
230   PetscCall(DMDestroy(&dm_mass));
231 
232   /* inner(grad(u), grad(v))*dx with homogeneous Dirichlet boundary conditions */
233   PetscCall(DMClone(dm, &dm_laplace));
234   PetscCall(DMCopyDisc(dm, dm_laplace));
235   PetscCall(DMSetNumFields(dm_laplace, 1));
236   PetscCall(DMPlexCopyCoordinates(dm, dm_laplace));
237   PetscCall(DMGetDS(dm_laplace, &prob_laplace));
238   PetscCall(PetscDSSetJacobian(prob_laplace, 0, 0, NULL, NULL, NULL, laplace_kernel));
239   PetscCall(PetscDSSetDiscretization(prob_laplace, 0, (PetscObject) fe));
240   PetscCall(DMCreateMatrix(dm_laplace, &user->laplace));
241   PetscCall(DMPlexSNESComputeJacobianFEM(dm_laplace, user->data, user->laplace, user->laplace, NULL));
242 
243   /* Code from Matt to get the indices associated with the boundary dofs */
244   PetscCall(DMGetLabel(dm_laplace, "marker", &label));
245   PetscCall(DMAddBoundary(dm_laplace, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void)) zero, NULL, NULL, NULL));
246   PetscCall(DMGetLocalSection(dm_laplace, &section));
247   PetscCall(DMLabelGetStratumSize(label, 1, &n));
248   PetscCall(DMLabelGetStratumIS(label, 1, &is));
249   PetscCall(ISGetIndices(is, &points));
250   user->num_bc_dofs = 0;
251   for (p = 0; p < n; ++p) {
252     PetscCall(PetscSectionGetDof(section, points[p], &dof));
253     user->num_bc_dofs += dof;
254   }
255   PetscCall(PetscMalloc1(user->num_bc_dofs, &user->bc_indices));
256   for (p = 0, k = 0; p < n; ++p) {
257     PetscCall(PetscSectionGetDof(section, points[p], &dof));
258     PetscCall(PetscSectionGetOffset(section, points[p], &off));
259     for (d = 0; d < dof; ++d) user->bc_indices[k++] = off+d;
260   }
261   PetscCall(ISRestoreIndices(is, &points));
262   PetscCall(ISDestroy(&is));
263   PetscCall(DMDestroy(&dm_laplace));
264 
265   /* This is how I handle boundary conditions. I can't figure out how to get
266      plex to play with the way I want to impose the BCs. This loses symmetry,
267      but not in a disastrous way. If someone can improve it, please do! */
268   PetscCall(MatZeroRows(user->laplace, user->num_bc_dofs, user->bc_indices, 1.0, NULL, NULL));
269   PetscCall(PetscCalloc1(user->num_bc_dofs, &user->bc_values));
270 
271   /* also create the KSP for solving the Laplace system */
272   PetscCall(KSPCreate(PETSC_COMM_WORLD, &user->ksp_laplace));
273   PetscCall(KSPSetOperators(user->ksp_laplace, user->laplace, user->laplace));
274   PetscCall(KSPSetOptionsPrefix(user->ksp_laplace, "laplace_"));
275   PetscCall(KSPSetFromOptions(user->ksp_laplace));
276 
277   /* A bit of setting up the user context */
278   user->dm = dm;
279   PetscCall(VecDuplicate(user->data, &user->state));
280   PetscCall(VecDuplicate(user->data, &user->adjoint));
281   PetscCall(VecDuplicate(user->data, &user->tmp1));
282   PetscCall(VecDuplicate(user->data, &user->tmp2));
283 
284   PetscCall(PetscFEDestroy(&fe));
285 
286   PetscFunctionReturn(0);
287 }
288 
/* Release everything CreateCtx allocated (matrices, KSP, vectors, BC arrays).
   Note: user->dm is not destroyed here; main() owns and destroys it. */
PetscErrorCode DestroyCtx(AppCtx* user)
{
  PetscFunctionBeginUser;

  PetscCall(MatDestroy(&user->mass));
  PetscCall(MatDestroy(&user->laplace));
  PetscCall(KSPDestroy(&user->ksp_laplace));
  PetscCall(VecDestroy(&user->data));
  PetscCall(VecDestroy(&user->state));
  PetscCall(VecDestroy(&user->adjoint));
  PetscCall(VecDestroy(&user->tmp1));
  PetscCall(VecDestroy(&user->tmp2));
  PetscCall(PetscFree(user->bc_indices));
  PetscCall(PetscFree(user->bc_values));

  PetscFunctionReturn(0);
}
306 
/* TAO callback: evaluate the reduced functional
     J(u) = 1/2 (y(u)-y_d)^T M (y(u)-y_d) + alpha/2 u^T M u
   and its gradient, where the state y(u) solves K y = M u (forward solve) and
   the adjoint lambda solves K lambda = M (y - y_d).  The gradient is
   g = alpha M u + M lambda.  Boundary entries of each right-hand side are
   overwritten with bc_values (all zero, calloc'ed in CreateCtx) to match the
   zeroed Dirichlet rows of K.  NOTE(review): the adjoint solve reuses K rather
   than K^T; K is symmetric apart from the zeroed BC rows, which CreateCtx
   flags as an acceptable loss of symmetry. */
PetscErrorCode ReducedFunctionGradient(Tao tao, Vec u, PetscReal* func, Vec g, void* userv)
{
  AppCtx* user = (AppCtx*) userv;
  const PetscReal alpha = 1.0e-6; /* regularisation parameter */
  PetscReal inner;

  PetscFunctionBeginUser;

  /* tmp1 = M u */
  PetscCall(MatMult(user->mass, u, user->tmp1));
  PetscCall(VecDot(u, user->tmp1, &inner));               /* regularisation contribution to */
  *func = alpha * 0.5 * inner;                                      /* the functional                 */

  PetscCall(VecSet(g, 0.0));
  PetscCall(VecAXPY(g, alpha, user->tmp1));               /* regularisation contribution to the gradient */

  /* Now compute the forward state: impose the (zero) BC values on the
     right-hand side M u, then solve K y = M u. */
  PetscCall(VecSetValues(user->tmp1, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(user->tmp1));
  PetscCall(VecAssemblyEnd(user->tmp1));
  PetscCall(KSPSolve(user->ksp_laplace, user->tmp1, user->state)); /* forward solve */

  /* Now compute the adjoint state also: tmp1 = y - y_d, tmp2 = M (y - y_d) */
  PetscCall(VecCopy(user->state, user->tmp1));
  PetscCall(VecAXPY(user->tmp1, -1.0, user->data));
  PetscCall(MatMult(user->mass, user->tmp1, user->tmp2));
  PetscCall(VecDot(user->tmp1, user->tmp2, &inner));      /* misfit contribution to */
  *func += 0.5 * inner;                                             /* the functional         */

  /* zero the BC entries of the adjoint right-hand side, then solve
     K lambda = M (y - y_d) */
  PetscCall(VecSetValues(user->tmp2, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(user->tmp2));
  PetscCall(VecAssemblyEnd(user->tmp2));
  PetscCall(KSPSolve(user->ksp_laplace, user->tmp2, user->adjoint)); /* adjoint solve */

  /* And bring it home with the gradient: g += M lambda */
  PetscCall(MatMult(user->mass, user->adjoint, user->tmp1));
  PetscCall(VecAXPY(g, 1.0, user->tmp1));                 /* adjoint contribution to the gradient */

  PetscFunctionReturn(0);
}
346 
/* Driver: build the mesh and context, set up a bound-constrained BLMVM solve
   of the reduced problem with bounds 0 <= u <= 0.8, optionally install the
   mass matrix as Riesz map / LMVM initial Hessian for mesh independence,
   solve, and view the result. */
int main(int argc, char **argv)
{
  DM             dm;
  Tao            tao;
  Vec            u, lb, ub;
  AppCtx         user;

  PetscCall(PetscInitialize(&argc, &argv, NULL,help));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(CreateCtx(dm, &user));

  /* initial guess u = 0 and simple box bounds */
  PetscCall(DMCreateGlobalVector(dm, &u));
  PetscCall(VecSet(u, 0.0));
  PetscCall(VecDuplicate(u, &lb));
  PetscCall(VecDuplicate(u, &ub));
  PetscCall(VecSet(lb, 0.0)); /* satisfied at the minimum anyway */
  PetscCall(VecSet(ub, 0.8)); /* a nontrivial upper bound */

  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetSolution(tao, u));
  PetscCall(TaoSetObjectiveAndGradient(tao,NULL, ReducedFunctionGradient, &user));
  PetscCall(TaoSetVariableBounds(tao, lb, ub));
  PetscCall(TaoSetType(tao, TAOBLMVM));
  PetscCall(TaoSetFromOptions(tao));

  if (user.use_riesz) {
    PetscCall(TaoLMVMSetH0(tao, user.mass));       /* crucial for mesh independence */
    PetscCall(TaoSetGradientNorm(tao, user.mass));
  }

  PetscCall(TaoSolve(tao));

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(VecViewFromOptions(u, NULL, "-sol_view"));

  /* clean up; DestroyCtx frees everything CreateCtx built except dm */
  PetscCall(TaoDestroy(&tao));
  PetscCall(DMDestroy(&dm));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&lb));
  PetscCall(VecDestroy(&ub));
  PetscCall(DestroyCtx(&user));
  PetscCall(PetscFinalize());
  return 0;
}
391 
392 /*TEST
393 
394     build:
395       requires: !complex !single
396 
397     test:
398       requires: hdf5 double datafilespath !defined(PETSC_USE_64BIT_INDICES) hypre
399       args: -laplace_ksp_type cg -laplace_pc_type hypre -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 5 -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-6 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
400       filter: sed -e "s/-nan/nan/g"
401 
402     test:
403       suffix: guess_pod
404       requires: double triangle
405       args: -laplace_ksp_type cg -laplace_pc_type gamg -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 8 -laplace_pc_gamg_esteig_ksp_type cg -laplace_pc_gamg_esteig_ksp_max_it 5 -laplace_mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.0 -laplace_ksp_converged_reason -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -mat_lmvm_pc_gamg_esteig_ksp_type cg -mat_lmvm_pc_gamg_esteig_ksp_max_it 3 -tao_monitor -petscspace_degree 1 -tao_converged_reason -dm_refine 0 -laplace_ksp_guess_type pod -tao_gatol 1e-6
406       filter: sed -e "s/-nan/nan/g" -e "s/-NaN/nan/g" -e "s/NaN/nan/g" -e "s/CONVERGED_RTOL iterations 9/CONVERGED_RTOL iterations 8/g" -e "s/CONVERGED_RTOL iterations 4/CONVERGED_RTOL iterations 3/g"
407 
408 TEST*/
409