xref: /petsc/src/tao/tutorials/ex3.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
/* Usage summary printed by PETSc when the program is run with -help. */
static char help[] = "Reduced formulation of the mother problem of PDE-constrained optimisation.\n";
2 
3 /*F
4   We solve the mother problem
5 
6   min 1/2 || y - y_d ||^2_2 + \alpha/2 || u ||^2_2
7 
8   subject to
9 
10           - \laplace y = u          on \Omega
11                      y = 0          on \Gamma
12 
13   where u is in L^2 and y is in H^1_0.
14 
15   We formulate the reduced problem solely in terms of the control
16   by using the state equation to express y in terms of u, and then
17   apply LMVM/BLMVM to the resulting reduced problem.
18 
19   Mesh independence is achieved by configuring the Riesz map for the control
20   space.
21 
  Example meshes where the Riesz map is crucial can be downloaded from
  http://people.maths.ox.ac.uk/~farrellp/meshes.tar.gz
24 
25   Contributed by: Patrick Farrell <patrick.farrell@maths.ox.ac.uk>
26 
27   Run with e.g.:
28   ./ex3 -laplace_ksp_type cg -laplace_pc_type hypre -mat_lmvm_ksp_type cg -mat_lmvm_pc_type gamg -laplace_ksp_monitor_true_residual -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-9 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
29 
30   and visualise in paraview with ../../../../petsc_gen_xdmf.py solution.h5.
31 
32   Toggle the Riesz map (-use_riesz 0) to see the difference setting the Riesz maps makes.
33 
34   TODO: broken for parallel runs
35 F*/
36 
37 #include <petsc.h>
38 #include <petscfe.h>
39 #include <petscviewerhdf5.h>
40 
/* Application context shared by the setup routines and the TAO
   objective/gradient callback. Created by CreateCtx(), freed by DestroyCtx(). */
typedef struct {
  DM  dm;          /* mesh and discretisation of the control space */
  Mat mass;        /* mass matrix, assembled from mass_kernel; Riesz map for the control */
  Vec data;        /* y_d, the data we seek to match (projection of data_kernel) */
  Vec state;       /* y, solution of the forward (state) equation */
  Vec tmp1;        /* work vector */
  Vec tmp2;        /* work vector */
  Vec adjoint;     /* solution of the adjoint equation */
  Mat laplace;     /* stiffness matrix with Dirichlet rows replaced (MatZeroRows, diag 1.0) */
  KSP ksp_laplace; /* solver used for both the forward and adjoint solves */
  PetscInt  num_bc_dofs;   /* number of Dirichlet boundary dofs */
  PetscInt* bc_indices;    /* indices of the Dirichlet boundary dofs */
  PetscScalar* bc_values;  /* boundary values (all zero: homogeneous BCs) */
  PetscBool use_riesz;     /* whether to hand the mass matrix to TAO as H0/gradient norm */
} AppCtx;
56 
/* Create the computational mesh.

   If no -f filename is given, an empty DMPLEX is created and configured
   entirely via -dm_* command-line options. Otherwise the mesh is read from a
   FEniCS-generated HDF5 file (requires PETSc configured with HDF5): the
   coordinates and topology are loaded as Vecs from the /Mesh/mesh group and
   handed to DMPlexCreateFromCellListPetsc. Also reads the -use_riesz toggle
   into the user context.

   comm  - communicator for the mesh
   user  - application context; use_riesz is set here
   dm    - output mesh */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscBool      flg;
  char           filename[2048];

  PetscFunctionBeginUser;
  filename[0] = '\0';
  user->use_riesz = PETSC_TRUE;

  PetscOptionsBegin(comm, "", "Poisson mother problem options", "DMPLEX");
  PetscCall(PetscOptionsBool("-use_riesz", "Use the Riesz map to achieve mesh independence", "ex3.c", user->use_riesz, &user->use_riesz, NULL));
  PetscCall(PetscOptionsString("-f", "filename to read", "ex3.c", filename, filename, sizeof(filename), &flg));
  PetscOptionsEnd();

  if (!flg) {
    /* No mesh file: build a default plex, shaped later by DMSetFromOptions */
    PetscCall(DMCreate(comm, dm));
    PetscCall(DMSetType(*dm, DMPLEX));
  } else {
    /* TODO Eliminate this in favor of DMLoad() in new code */
#if defined(PETSC_HAVE_HDF5)
    const PetscInt vertices_per_cell = 3; /* triangles only */
    PetscViewer    viewer;
    Vec            coordinates;
    Vec            topology;
    PetscInt       dim = 2, numCells;
    PetscInt       numVertices;
    PetscScalar*   coords;
    PetscScalar*   topo_f;
    PetscInt*      cells;
    PetscInt       j;
    DMLabel        label;

    /* Read in FEniCS HDF5 output */
    PetscCall(PetscViewerHDF5Open(comm, filename, FILE_MODE_READ, &viewer));

    /* create Vecs to read in the data from H5 */
    PetscCall(VecCreate(comm, &coordinates));
    PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
    PetscCall(VecSetBlockSize(coordinates, dim));
    PetscCall(VecCreate(comm, &topology));
    PetscCall(PetscObjectSetName((PetscObject)topology, "topology"));
    PetscCall(VecSetBlockSize(topology, vertices_per_cell));

    /* navigate to the right group */
    PetscCall(PetscViewerHDF5PushGroup(viewer, "/Mesh/mesh"));

    /* Read the Vecs */
    PetscCall(VecLoad(coordinates, viewer));
    PetscCall(VecLoad(topology, viewer));

    /* do some ugly calculations: recover cell/vertex counts from flat sizes */
    PetscCall(VecGetSize(topology, &numCells));
    numCells = numCells / vertices_per_cell;
    PetscCall(VecGetSize(coordinates, &numVertices));
    numVertices = numVertices / dim;

    PetscCall(VecGetArray(coordinates, &coords));
    PetscCall(VecGetArray(topology, &topo_f));
    /* and now we have to convert the double representation to integers to pass over, argh */
    PetscCall(PetscMalloc1(numCells*vertices_per_cell, &cells));
    for (j = 0; j < numCells*vertices_per_cell; j++) {
      cells[j] = (PetscInt) topo_f[j];
    }

    /* Now create the DM */
    PetscCall(DMPlexCreateFromCellListPetsc(comm, dim, numCells, numVertices, vertices_per_cell, PETSC_TRUE, cells, dim, coords, dm));
    /* Check for flipped first cell: a negative Jacobian determinant means the
       cell's vertex ordering gives it negative orientation, so flip it */
    {
      PetscReal v0[3], J[9], invJ[9], detJ;

      PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
      if (detJ < 0) {
        PetscCall(DMPlexOrientPoint(*dm, 0, -1));
        PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
        PetscCheck(detJ >= 0,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Something is wrong");
      }
    }
    PetscCall(DMPlexOrient(*dm));
    /* Mark the whole boundary with marker value 1; CreateCtx uses this label
       to collect the Dirichlet dofs */
    PetscCall(DMCreateLabel(*dm, "marker"));
    PetscCall(DMGetLabel(*dm, "marker", &label));
    PetscCall(DMPlexMarkBoundaryFaces(*dm, 1, label));
    PetscCall(DMPlexLabelComplete(*dm, label));

    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(VecRestoreArray(coordinates, &coords));
    PetscCall(VecRestoreArray(topology, &topo_f));
    PetscCall(PetscFree(cells));
    PetscCall(VecDestroy(&coordinates));
    PetscCall(VecDestroy(&topology));
#else
    SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Reconfigure PETSc with --download-hdf5");
#endif
  }
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
154 
155 void mass_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux,
156            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
157            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
158            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[])
159 {
160   g0[0] = 1.0;
161 }
162 
163 void laplace_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux,
164            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
165            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
166            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
167 {
168   PetscInt d;
169   for (d = 0; d < dim; ++d) g3[d*dim+d] = 1.0;
170 }
171 
172 /* data we seek to match */
173 PetscErrorCode data_kernel(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *y, void *ctx)
174 {
175   *y = 1.0/(2*PETSC_PI*PETSC_PI) * PetscSinReal(PETSC_PI*x[0]) * PetscSinReal(PETSC_PI*x[1]);
176   /* the associated control is sin(pi*x[0])*sin(pi*x[1]) */
177   return 0;
178 }
179 PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
180 {
181   *u = 0.0;
182   return 0;
183 }
184 
/* Build everything the reduced-space solver needs and stash it in the user
   context: the target data y_d, the mass matrix, the Dirichlet-modified
   stiffness matrix and its KSP, the list of boundary dofs/values, and the
   work vectors. The FE space is created here (degree set via
   -petscspace_degree) and attached to dm.

   dm   - the mesh created by CreateMesh (stored in user->dm; not copied)
   user - context to fill; release with DestroyCtx */
PetscErrorCode CreateCtx(DM dm, AppCtx* user)
{

  DM             dm_mass;
  DM             dm_laplace;
  PetscDS        prob_mass;
  PetscDS        prob_laplace;
  PetscFE        fe;
  DMLabel        label;
  PetscSection   section;
  PetscInt       n, k, p, d;
  PetscInt       dof, off;
  IS             is;
  const PetscInt* points;
  const PetscInt dim = 2;
  PetscErrorCode (**wtf)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);

  PetscFunctionBeginUser;

  /* make the data we seek to match */
  PetscCall(PetscFECreateDefault(PetscObjectComm((PetscObject) dm), dim, 1, PETSC_TRUE, NULL, 4, &fe));

  PetscCall(DMSetField(dm, 0, NULL, (PetscObject) fe));
  PetscCall(DMCreateDS(dm));
  PetscCall(DMCreateGlobalVector(dm, &user->data));

  /* ugh, this is hideous */
  /* y_d = interpolate(Expression("sin(x[0]) + .."), V) */
  /* DMProjectFunction wants an array of function pointers, one per field */
  PetscCall(PetscMalloc(1 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &wtf));
  wtf[0] = data_kernel;
  PetscCall(DMProjectFunction(dm, 0.0, wtf, NULL, INSERT_VALUES, user->data));
  PetscCall(PetscFree(wtf));

  /* assemble(inner(u, v)*dx), almost */
  /* Clone the DM so the mass matrix gets its own DS with mass_kernel */
  PetscCall(DMClone(dm, &dm_mass));
  PetscCall(DMCopyDisc(dm, dm_mass));
  PetscCall(DMSetNumFields(dm_mass, 1));
  PetscCall(DMPlexCopyCoordinates(dm, dm_mass)); /* why do I have to do this separately? */
  PetscCall(DMGetDS(dm_mass, &prob_mass));
  PetscCall(PetscDSSetJacobian(prob_mass, 0, 0, mass_kernel, NULL, NULL, NULL));
  PetscCall(PetscDSSetDiscretization(prob_mass, 0, (PetscObject) fe));
  PetscCall(DMCreateMatrix(dm_mass, &user->mass));
  /* Assemble by evaluating the (linear) Jacobian at user->data */
  PetscCall(DMPlexSNESComputeJacobianFEM(dm_mass, user->data, user->mass, user->mass, NULL));
  PetscCall(MatSetOption(user->mass, MAT_SYMMETRIC, PETSC_TRUE));
  PetscCall(DMDestroy(&dm_mass));

  /* inner(grad(u), grad(v))*dx with homogeneous Dirichlet boundary conditions */
  PetscCall(DMClone(dm, &dm_laplace));
  PetscCall(DMCopyDisc(dm, dm_laplace));
  PetscCall(DMSetNumFields(dm_laplace, 1));
  PetscCall(DMPlexCopyCoordinates(dm, dm_laplace));
  PetscCall(DMGetDS(dm_laplace, &prob_laplace));
  PetscCall(PetscDSSetJacobian(prob_laplace, 0, 0, NULL, NULL, NULL, laplace_kernel));
  PetscCall(PetscDSSetDiscretization(prob_laplace, 0, (PetscObject) fe));
  PetscCall(DMCreateMatrix(dm_laplace, &user->laplace));
  PetscCall(DMPlexSNESComputeJacobianFEM(dm_laplace, user->data, user->laplace, user->laplace, NULL));

  /* Collect the dofs on the boundary ("marker" stratum 1): first count them,
     then record their section offsets */
  PetscCall(DMGetLabel(dm_laplace, "marker", &label));
  PetscCall(DMGetLocalSection(dm_laplace, &section));
  PetscCall(DMLabelGetStratumSize(label, 1, &n));
  PetscCall(DMLabelGetStratumIS(label, 1, &is));
  PetscCall(ISGetIndices(is, &points));
  user->num_bc_dofs = 0;
  for (p = 0; p < n; ++p) {
    PetscCall(PetscSectionGetDof(section, points[p], &dof));
    user->num_bc_dofs += dof;
  }
  PetscCall(PetscMalloc1(user->num_bc_dofs, &user->bc_indices));
  for (p = 0, k = 0; p < n; ++p) {
    PetscCall(PetscSectionGetDof(section, points[p], &dof));
    PetscCall(PetscSectionGetOffset(section, points[p], &off));
    for (d = 0; d < dof; ++d) user->bc_indices[k++] = off+d;
  }
  PetscCall(ISRestoreIndices(is, &points));
  PetscCall(ISDestroy(&is));
  PetscCall(DMDestroy(&dm_laplace));

  /* This is how I handle boundary conditions. I can't figure out how to get
     plex to play with the way I want to impose the BCs. This loses symmetry,
     but not in a disastrous way. If someone can improve it, please do! */
  PetscCall(MatZeroRows(user->laplace, user->num_bc_dofs, user->bc_indices, 1.0, NULL, NULL));
  PetscCall(PetscCalloc1(user->num_bc_dofs, &user->bc_values)); /* zeroed: homogeneous BCs */

  /* also create the KSP for solving the Laplace system */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &user->ksp_laplace));
  PetscCall(KSPSetOperators(user->ksp_laplace, user->laplace, user->laplace));
  PetscCall(KSPSetOptionsPrefix(user->ksp_laplace, "laplace_"));
  PetscCall(KSPSetFromOptions(user->ksp_laplace));

  /* A bit of setting up the user context */
  user->dm = dm;
  PetscCall(VecDuplicate(user->data, &user->state));
  PetscCall(VecDuplicate(user->data, &user->adjoint));
  PetscCall(VecDuplicate(user->data, &user->tmp1));
  PetscCall(VecDuplicate(user->data, &user->tmp2));

  PetscCall(PetscFEDestroy(&fe));

  PetscFunctionReturn(0);
}
285 
286 PetscErrorCode DestroyCtx(AppCtx* user)
287 {
288   PetscFunctionBeginUser;
289 
290   PetscCall(MatDestroy(&user->mass));
291   PetscCall(MatDestroy(&user->laplace));
292   PetscCall(KSPDestroy(&user->ksp_laplace));
293   PetscCall(VecDestroy(&user->data));
294   PetscCall(VecDestroy(&user->state));
295   PetscCall(VecDestroy(&user->adjoint));
296   PetscCall(VecDestroy(&user->tmp1));
297   PetscCall(VecDestroy(&user->tmp2));
298   PetscCall(PetscFree(user->bc_indices));
299   PetscCall(PetscFree(user->bc_values));
300 
301   PetscFunctionReturn(0);
302 }
303 
304 PetscErrorCode ReducedFunctionGradient(Tao tao, Vec u, PetscReal* func, Vec g, void* userv)
305 {
306   AppCtx* user = (AppCtx*) userv;
307   const PetscReal alpha = 1.0e-6; /* regularisation parameter */
308   PetscReal inner;
309 
310   PetscFunctionBeginUser;
311 
312   PetscCall(MatMult(user->mass, u, user->tmp1));
313   PetscCall(VecDot(u, user->tmp1, &inner));               /* regularisation contribution to */
314   *func = alpha * 0.5 * inner;                                      /* the functional                 */
315 
316   PetscCall(VecSet(g, 0.0));
317   PetscCall(VecAXPY(g, alpha, user->tmp1));               /* regularisation contribution to the gradient */
318 
319   /* Now compute the forward state. */
320   PetscCall(VecSetValues(user->tmp1, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
321   PetscCall(VecAssemblyBegin(user->tmp1));
322   PetscCall(VecAssemblyEnd(user->tmp1));
323   PetscCall(KSPSolve(user->ksp_laplace, user->tmp1, user->state)); /* forward solve */
324 
325   /* Now compute the adjoint state also. */
326   PetscCall(VecCopy(user->state, user->tmp1));
327   PetscCall(VecAXPY(user->tmp1, -1.0, user->data));
328   PetscCall(MatMult(user->mass, user->tmp1, user->tmp2));
329   PetscCall(VecDot(user->tmp1, user->tmp2, &inner));      /* misfit contribution to */
330   *func += 0.5 * inner;                                             /* the functional         */
331 
332   PetscCall(VecSetValues(user->tmp2, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
333   PetscCall(VecAssemblyBegin(user->tmp2));
334   PetscCall(VecAssemblyEnd(user->tmp2));
335   PetscCall(KSPSolve(user->ksp_laplace, user->tmp2, user->adjoint)); /* adjoint solve */
336 
337   /* And bring it home with the gradient. */
338   PetscCall(MatMult(user->mass, user->adjoint, user->tmp1));
339   PetscCall(VecAXPY(g, 1.0, user->tmp1));                 /* adjoint contribution to the gradient */
340 
341   PetscFunctionReturn(0);
342 }
343 
/* Driver: build the mesh and context, configure a bound-constrained LMVM
   solve of the reduced problem, optionally install the mass matrix as the
   Riesz map (initial Hessian and gradient norm) for mesh independence,
   solve, and view the result. */
int main(int argc, char **argv)
{
  DM             dm;
  Tao            tao;
  Vec            u, lb, ub;  /* control iterate and its bound constraints */
  AppCtx         user;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL,help));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(CreateCtx(dm, &user));

  /* Zero initial guess and simple box constraints on the control */
  PetscCall(DMCreateGlobalVector(dm, &u));
  PetscCall(VecSet(u, 0.0));
  PetscCall(VecDuplicate(u, &lb));
  PetscCall(VecDuplicate(u, &ub));
  PetscCall(VecSet(lb, 0.0)); /* satisfied at the minimum anyway */
  PetscCall(VecSet(ub, 0.8)); /* a nontrivial upper bound */

  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetSolution(tao, u));
  PetscCall(TaoSetObjectiveAndGradient(tao,NULL, ReducedFunctionGradient, &user));
  PetscCall(TaoSetVariableBounds(tao, lb, ub));
  PetscCall(TaoSetType(tao, TAOBLMVM));
  PetscCall(TaoSetFromOptions(tao));

  if (user.use_riesz) {
    PetscCall(TaoLMVMSetH0(tao, user.mass));       /* crucial for mesh independence */
    PetscCall(TaoSetGradientNorm(tao, user.mass));
  }

  PetscCall(TaoSolve(tao));

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(VecViewFromOptions(u, NULL, "-sol_view"));

  PetscCall(TaoDestroy(&tao));
  PetscCall(DMDestroy(&dm));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&lb));
  PetscCall(VecDestroy(&ub));
  PetscCall(DestroyCtx(&user));
  PetscCall(PetscFinalize());
  return 0;
}
389 
390 /*TEST
391 
392     build:
393       requires: !complex !single
394 
395     test:
396       requires: hdf5 double datafilespath !defined(PETSC_USE_64BIT_INDICES) hypre !cuda
397       args: -laplace_ksp_type cg -laplace_pc_type hypre -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 5 -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-6 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
398       filter: sed -e "s/-nan/nan/g"
399 
400     test:
401       suffix: guess_pod
402       requires: double triangle
403       args: -laplace_ksp_type cg -laplace_pc_type gamg -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 8 -laplace_pc_gamg_esteig_ksp_type cg -laplace_pc_gamg_esteig_ksp_max_it 5 -laplace_mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.0 -laplace_ksp_converged_reason -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -mat_lmvm_pc_gamg_esteig_ksp_type cg -mat_lmvm_pc_gamg_esteig_ksp_max_it 3 -tao_monitor -petscspace_degree 1 -tao_converged_reason -dm_refine 0 -laplace_ksp_guess_type pod -tao_gatol 1e-6 -laplace_pc_gamg_aggressive_coarsening 0
404       filter: sed -e "s/-nan/nan/g" -e "s/-NaN/nan/g" -e "s/NaN/nan/g" -e "s/CONVERGED_RTOL iterations 9/CONVERGED_RTOL iterations 8/g" -e "s/CONVERGED_RTOL iterations 4/CONVERGED_RTOL iterations 3/g"
405 
406 TEST*/
407