/* Help text printed by PetscInitialize() when the program is run with -help. */
static char help[] = "Reduced formulation of the mother problem of PDE-constrained optimisation.\n";
2
3 /*F
4 We solve the mother problem
5
6 min 1/2 || y - y_d ||^2_2 + \alpha/2 || u ||^2_2
7
8 subject to
9
10 - \laplace y = u on \Omega
11 y = 0 on \Gamma
12
13 where u is in L^2 and y is in H^1_0.
14
15 We formulate the reduced problem solely in terms of the control
16 by using the state equation to express y in terms of u, and then
17 apply LMVM/BLMVM to the resulting reduced problem.
18
19 Mesh independence is achieved by configuring the Riesz map for the control
20 space.
21
Example meshes where the Riesz map is crucial can be downloaded from
http://people.maths.ox.ac.uk/~farrellp/meshes.tar.gz
24
25 Contributed by: Patrick Farrell <patrick.farrell@maths.ox.ac.uk>
26
27 Run with e.g.:
28 ./ex3 -laplace_ksp_type cg -laplace_pc_type hypre -tao_blmvm_mat_lmvm_J0_ksp_type cg -tao_blmvm_mat_lmvm_J0_pc_type gamg -laplace_ksp_monitor_true_residual -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-9 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
29
30 and visualise in paraview with ../../../../petsc_gen_xdmf.py solution.h5.
31
32 Toggle the Riesz map (-use_riesz 0) to see the difference setting the Riesz maps makes.
33
34 TODO: broken for parallel runs
35 F*/
36
37 #include <petsc.h>
38 #include <petscfe.h>
39 #include <petscviewerhdf5.h>
40
/* Application context: everything the reduced-space objective/gradient needs. */
typedef struct {
  DM           dm;          /* mesh and discretisation of the control/state space (borrowed, not owned) */
  Mat          mass;        /* mass matrix M; also serves as the Riesz map for the L^2 control space */
  Vec          data;        /* y_d, the data we seek to match */
  Vec          state;       /* y(u), the forward state */
  Vec          tmp1;        /* work vector, clobbered by ReducedFunctionGradient */
  Vec          tmp2;        /* work vector, clobbered by ReducedFunctionGradient */
  Vec          adjoint;     /* adjoint state lambda */
  Mat          laplace;     /* stiffness matrix with Dirichlet rows zeroed (diag = 1) */
  KSP          ksp_laplace; /* solver for the (self-adjoint) Laplace operator */
  PetscInt     num_bc_dofs; /* number of boundary (Dirichlet) dofs */
  PetscInt    *bc_indices;  /* global indices of the boundary dofs */
  PetscScalar *bc_values;   /* boundary values (all zero: homogeneous BCs) */
  PetscBool    use_riesz;   /* toggle the Riesz map (-use_riesz); crucial for mesh independence */
} AppCtx;
56
/*
  CreateMesh - Build the computational mesh.

  Without -f, creates an empty DMPLEX that is configured entirely from the
  command line (via DMSetFromOptions below). With -f <file>, reads a
  FEniCS-generated HDF5 file whose /Mesh/mesh group holds flat "coordinates"
  and "topology" vectors, and assembles a triangle mesh from them.
  Also reads -use_riesz into the application context.

  Collective on comm.

  Input:  comm - the communicator
  Output: user - user->use_riesz is set here
          dm   - the created mesh
*/
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscBool flg;
  char      filename[2048];

  PetscFunctionBeginUser;
  filename[0]     = '\0';
  user->use_riesz = PETSC_TRUE;

  PetscOptionsBegin(comm, "", "Poisson mother problem options", "DMPLEX");
  PetscCall(PetscOptionsBool("-use_riesz", "Use the Riesz map to achieve mesh independence", "ex3.c", user->use_riesz, &user->use_riesz, NULL));
  PetscCall(PetscOptionsString("-f", "filename to read", "ex3.c", filename, filename, sizeof(filename), &flg));
  PetscOptionsEnd();

  if (!flg) {
    PetscCall(DMCreate(comm, dm));
    PetscCall(DMSetType(*dm, DMPLEX));
  } else {
    /* TODO Eliminate this in favor of DMLoad() in new code */
#if defined(PETSC_HAVE_HDF5)
    const PetscInt vertices_per_cell = 3; /* linear triangles only */
    PetscViewer    viewer;
    Vec            coordinates;
    Vec            topology;
    PetscInt       dim = 2, numCells;
    PetscInt       numVertices;
    PetscScalar   *coords;
    PetscScalar   *topo_f;
    PetscInt      *cells;
    PetscInt       j;
    DMLabel        label;

    /* Read in FEniCS HDF5 output */
    PetscCall(PetscViewerHDF5Open(comm, filename, FILE_MODE_READ, &viewer));

    /* create Vecs to read in the data from H5 */
    PetscCall(VecCreate(comm, &coordinates));
    PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
    PetscCall(VecSetBlockSize(coordinates, dim));
    PetscCall(VecCreate(comm, &topology));
    PetscCall(PetscObjectSetName((PetscObject)topology, "topology"));
    PetscCall(VecSetBlockSize(topology, vertices_per_cell));

    /* navigate to the right group */
    PetscCall(PetscViewerHDF5PushGroup(viewer, "/Mesh/mesh"));

    /* Read the Vecs */
    PetscCall(VecLoad(coordinates, viewer));
    PetscCall(VecLoad(topology, viewer));

    /* do some ugly calculations: the Vecs are flat, so recover the counts
       from the total lengths divided by the per-entity block sizes */
    PetscCall(VecGetSize(topology, &numCells));
    numCells = numCells / vertices_per_cell;
    PetscCall(VecGetSize(coordinates, &numVertices));
    numVertices = numVertices / dim;

    PetscCall(VecGetArray(coordinates, &coords));
    PetscCall(VecGetArray(topology, &topo_f));
    /* and now we have to convert the double representation to integers to pass over, argh */
    PetscCall(PetscMalloc1(numCells * vertices_per_cell, &cells));
    for (j = 0; j < numCells * vertices_per_cell; j++) cells[j] = (PetscInt)topo_f[j];

    /* Now create the DM (interpolated, so edges/faces are created too) */
    PetscCall(DMPlexCreateFromCellListPetsc(comm, dim, numCells, numVertices, vertices_per_cell, PETSC_TRUE, cells, dim, coords, dm));
    /* Check for flipped first cell: FEniCS files may use the opposite
       orientation convention, detected via a negative Jacobian determinant */
    {
      PetscReal v0[3], J[9], invJ[9], detJ;

      PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
      if (detJ < 0) {
        PetscCall(DMPlexOrientPoint(*dm, 0, -1));
        PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
        PetscCheck(detJ >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Something is wrong");
      }
    }
    /* propagate a consistent orientation to the rest of the mesh */
    PetscCall(DMPlexOrient(*dm));
    /* mark the boundary so CreateCtx can find the Dirichlet dofs */
    PetscCall(DMCreateLabel(*dm, "marker"));
    PetscCall(DMGetLabel(*dm, "marker", &label));
    PetscCall(DMPlexMarkBoundaryFaces(*dm, 1, label));
    PetscCall(DMPlexLabelComplete(*dm, label));

    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(VecRestoreArray(coordinates, &coords));
    PetscCall(VecRestoreArray(topology, &topo_f));
    PetscCall(PetscFree(cells));
    PetscCall(VecDestroy(&coordinates));
    PetscCall(VecDestroy(&topology));
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Reconfigure PETSc with --download-hdf5");
#endif
  }
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
152
/* Pointwise Jacobian g0(u, v) = 1: assembling with this kernel yields the mass matrix. */
void mass_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[])
{
  *g0 = 1.0;
}
157
/* Pointwise Jacobian g3(grad u, grad v) = identity: assembling with this kernel yields the stiffness (Laplace) matrix. */
void laplace_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
{
  /* set only the diagonal of the dim x dim tensor; off-diagonals are left untouched */
  for (PetscInt d = 0; d < dim; ++d) g3[d * (dim + 1)] = 1.0;
}
163
164 /* data we seek to match */
data_kernel(PetscInt dim,PetscReal time,const PetscReal x[],PetscInt Nf,PetscScalar * y,PetscCtx ctx)165 PetscErrorCode data_kernel(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *y, PetscCtx ctx)
166 {
167 *y = 1.0 / (2 * PETSC_PI * PETSC_PI) * PetscSinReal(PETSC_PI * x[0]) * PetscSinReal(PETSC_PI * x[1]);
168 /* the associated control is sin(pi*x[0])*sin(pi*x[1]) */
169 return PETSC_SUCCESS;
170 }
zero(PetscInt dim,PetscReal time,const PetscReal x[],PetscInt Nf,PetscScalar * u,PetscCtx ctx)171 PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, PetscCtx ctx)
172 {
173 *u = 0.0;
174 return PETSC_SUCCESS;
175 }
176
CreateCtx(DM dm,AppCtx * user)177 PetscErrorCode CreateCtx(DM dm, AppCtx *user)
178 {
179 DM dm_mass;
180 DM dm_laplace;
181 PetscDS prob_mass;
182 PetscDS prob_laplace;
183 PetscFE fe;
184 DMLabel label;
185 PetscSection section;
186 PetscInt n, k, p, d;
187 PetscInt dof, off;
188 IS is;
189 const PetscInt *points;
190 const PetscInt dim = 2;
191 PetscErrorCode (**wtf)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, PetscCtx ctx);
192
193 PetscFunctionBeginUser;
194 /* make the data we seek to match */
195 PetscCall(PetscFECreateDefault(PetscObjectComm((PetscObject)dm), dim, 1, PETSC_TRUE, NULL, 4, &fe));
196
197 PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe));
198 PetscCall(DMCreateDS(dm));
199 PetscCall(DMCreateGlobalVector(dm, &user->data));
200
201 /* ugh, this is hideous */
202 /* y_d = interpolate(Expression("sin(x[0]) + .."), V) */
203 PetscCall(PetscMalloc(1 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &wtf));
204 wtf[0] = data_kernel;
205 PetscCall(DMProjectFunction(dm, 0.0, wtf, NULL, INSERT_VALUES, user->data));
206 PetscCall(PetscFree(wtf));
207
208 /* assemble(inner(u, v)*dx), almost */
209 PetscCall(DMClone(dm, &dm_mass));
210 PetscCall(DMCopyDisc(dm, dm_mass));
211 PetscCall(DMSetNumFields(dm_mass, 1));
212 PetscCall(DMPlexCopyCoordinates(dm, dm_mass)); /* why do I have to do this separately? */
213 PetscCall(DMGetDS(dm_mass, &prob_mass));
214 PetscCall(PetscDSSetJacobian(prob_mass, 0, 0, mass_kernel, NULL, NULL, NULL));
215 PetscCall(PetscDSSetDiscretization(prob_mass, 0, (PetscObject)fe));
216 PetscCall(DMCreateMatrix(dm_mass, &user->mass));
217 PetscCall(DMPlexSNESComputeJacobianFEM(dm_mass, user->data, user->mass, user->mass, NULL));
218 PetscCall(MatSetOption(user->mass, MAT_SYMMETRIC, PETSC_TRUE));
219 PetscCall(DMDestroy(&dm_mass));
220
221 /* inner(grad(u), grad(v))*dx with homogeneous Dirichlet boundary conditions */
222 PetscCall(DMClone(dm, &dm_laplace));
223 PetscCall(DMCopyDisc(dm, dm_laplace));
224 PetscCall(DMSetNumFields(dm_laplace, 1));
225 PetscCall(DMPlexCopyCoordinates(dm, dm_laplace));
226 PetscCall(DMGetDS(dm_laplace, &prob_laplace));
227 PetscCall(PetscDSSetJacobian(prob_laplace, 0, 0, NULL, NULL, NULL, laplace_kernel));
228 PetscCall(PetscDSSetDiscretization(prob_laplace, 0, (PetscObject)fe));
229 PetscCall(DMCreateMatrix(dm_laplace, &user->laplace));
230 PetscCall(DMPlexSNESComputeJacobianFEM(dm_laplace, user->data, user->laplace, user->laplace, NULL));
231
232 PetscCall(DMGetLabel(dm_laplace, "marker", &label));
233 PetscCall(DMGetLocalSection(dm_laplace, §ion));
234 PetscCall(DMLabelGetStratumSize(label, 1, &n));
235 PetscCall(DMLabelGetStratumIS(label, 1, &is));
236 PetscCall(ISGetIndices(is, &points));
237 user->num_bc_dofs = 0;
238 for (p = 0; p < n; ++p) {
239 PetscCall(PetscSectionGetDof(section, points[p], &dof));
240 user->num_bc_dofs += dof;
241 }
242 PetscCall(PetscMalloc1(user->num_bc_dofs, &user->bc_indices));
243 for (p = 0, k = 0; p < n; ++p) {
244 PetscCall(PetscSectionGetDof(section, points[p], &dof));
245 PetscCall(PetscSectionGetOffset(section, points[p], &off));
246 for (d = 0; d < dof; ++d) user->bc_indices[k++] = off + d;
247 }
248 PetscCall(ISRestoreIndices(is, &points));
249 PetscCall(ISDestroy(&is));
250 PetscCall(DMDestroy(&dm_laplace));
251
252 /* This is how I handle boundary conditions. I can't figure out how to get
253 plex to play with the way I want to impose the BCs. This loses symmetry,
254 but not in a disastrous way. If someone can improve it, please do! */
255 PetscCall(MatZeroRows(user->laplace, user->num_bc_dofs, user->bc_indices, 1.0, NULL, NULL));
256 PetscCall(PetscCalloc1(user->num_bc_dofs, &user->bc_values));
257
258 /* also create the KSP for solving the Laplace system */
259 PetscCall(KSPCreate(PETSC_COMM_WORLD, &user->ksp_laplace));
260 PetscCall(KSPSetOperators(user->ksp_laplace, user->laplace, user->laplace));
261 PetscCall(KSPSetOptionsPrefix(user->ksp_laplace, "laplace_"));
262 PetscCall(KSPSetFromOptions(user->ksp_laplace));
263
264 /* A bit of setting up the user context */
265 user->dm = dm;
266 PetscCall(VecDuplicate(user->data, &user->state));
267 PetscCall(VecDuplicate(user->data, &user->adjoint));
268 PetscCall(VecDuplicate(user->data, &user->tmp1));
269 PetscCall(VecDuplicate(user->data, &user->tmp2));
270
271 PetscCall(PetscFEDestroy(&fe));
272 PetscFunctionReturn(PETSC_SUCCESS);
273 }
274
DestroyCtx(AppCtx * user)275 PetscErrorCode DestroyCtx(AppCtx *user)
276 {
277 PetscFunctionBeginUser;
278 PetscCall(MatDestroy(&user->mass));
279 PetscCall(MatDestroy(&user->laplace));
280 PetscCall(KSPDestroy(&user->ksp_laplace));
281 PetscCall(VecDestroy(&user->data));
282 PetscCall(VecDestroy(&user->state));
283 PetscCall(VecDestroy(&user->adjoint));
284 PetscCall(VecDestroy(&user->tmp1));
285 PetscCall(VecDestroy(&user->tmp2));
286 PetscCall(PetscFree(user->bc_indices));
287 PetscCall(PetscFree(user->bc_values));
288 PetscFunctionReturn(PETSC_SUCCESS);
289 }
290
/*
  ReducedFunctionGradient - Tao callback: reduced objective and gradient.

  J(u) = 1/2 ||y(u) - y_d||^2_M + alpha/2 ||u||^2_M, where y(u) solves the
  (BC-modified) Laplace system A y = M u, and the gradient is
  g = alpha M u + M lambda with A lambda = M (y - y_d) (A is self-adjoint up
  to the BC rows, as noted in CreateCtx).

  NOTE(review): tmp1 and tmp2 are reused in a strictly order-dependent way;
  do not reorder the statements below.

  Input:  tao - the optimiser, u - current control, userv - AppCtx
  Output: func - objective value, g - gradient (in the dual space; the Riesz
          map to identify it with the primal space is set in main())
*/
PetscErrorCode ReducedFunctionGradient(Tao tao, Vec u, PetscReal *func, Vec g, void *userv)
{
  AppCtx         *user  = (AppCtx *)userv;
  const PetscReal alpha = 1.0e-6; /* regularisation parameter */
  PetscReal       inner;

  PetscFunctionBeginUser;
  /* tmp1 = M u */
  PetscCall(MatMult(user->mass, u, user->tmp1));
  PetscCall(VecDot(u, user->tmp1, &inner)); /* regularisation contribution to */
  *func = alpha * 0.5 * inner;              /* the functional */

  PetscCall(VecSet(g, 0.0));
  PetscCall(VecAXPY(g, alpha, user->tmp1)); /* regularisation contribution to the gradient */

  /* Now compute the forward state: impose BC values on the rhs (the BC rows
     of the operator were replaced by identity rows in CreateCtx), then solve. */
  PetscCall(VecSetValues(user->tmp1, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(user->tmp1));
  PetscCall(VecAssemblyEnd(user->tmp1));
  PetscCall(KSPSolve(user->ksp_laplace, user->tmp1, user->state)); /* forward solve */

  /* Now compute the adjoint state also: rhs is M (y - y_d). */
  PetscCall(VecCopy(user->state, user->tmp1));
  PetscCall(VecAXPY(user->tmp1, -1.0, user->data));
  PetscCall(MatMult(user->mass, user->tmp1, user->tmp2));
  PetscCall(VecDot(user->tmp1, user->tmp2, &inner)); /* misfit contribution to */
  *func += 0.5 * inner;                              /* the functional */

  PetscCall(VecSetValues(user->tmp2, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(user->tmp2));
  PetscCall(VecAssemblyEnd(user->tmp2));
  PetscCall(KSPSolve(user->ksp_laplace, user->tmp2, user->adjoint)); /* adjoint solve */

  /* And bring it home with the gradient. */
  PetscCall(MatMult(user->mass, user->adjoint, user->tmp1));
  PetscCall(VecAXPY(g, 1.0, user->tmp1)); /* adjoint contribution to the gradient */
  PetscFunctionReturn(PETSC_SUCCESS);
}
328
/* Drive the reduced-space bound-constrained optimisation with Tao/BLMVM. */
int main(int argc, char **argv)
{
  DM     dm;
  Tao    tao;
  Vec    u, lb, ub;
  AppCtx user;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(CreateCtx(dm, &user));

  /* initial guess and box bounds for the control */
  PetscCall(DMCreateGlobalVector(dm, &u));
  PetscCall(VecSet(u, 0.0));
  PetscCall(VecDuplicate(u, &lb));
  PetscCall(VecDuplicate(u, &ub));
  PetscCall(VecSet(lb, 0.0)); /* satisfied at the minimum anyway */
  PetscCall(VecSet(ub, 0.8)); /* a nontrivial upper bound */

  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetSolution(tao, u));
  PetscCall(TaoSetObjectiveAndGradient(tao, NULL, ReducedFunctionGradient, &user));
  PetscCall(TaoSetVariableBounds(tao, lb, ub));
  PetscCall(TaoSetType(tao, TAOBLMVM));
  PetscCall(TaoSetFromOptions(tao));

  /* use the mass matrix as the Riesz map for the L^2 control space */
  if (user.use_riesz) {
    PetscCall(TaoLMVMSetH0(tao, user.mass)); /* crucial for mesh independence */
    PetscCall(TaoSetGradientNorm(tao, user.mass));
  }

  PetscCall(TaoSolve(tao));

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(VecViewFromOptions(u, NULL, "-sol_view"));

  /* clean up (PETSc reference counting keeps this order safe) */
  PetscCall(TaoDestroy(&tao));
  PetscCall(DMDestroy(&dm));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&lb));
  PetscCall(VecDestroy(&ub));
  PetscCall(DestroyCtx(&user));
  PetscCall(PetscFinalize());
  return 0;
}
374
375 /*TEST
376
377 build:
378 requires: !complex !single
379
380 test:
381 requires: hdf5 double datafilespath !defined(PETSC_USE_64BIT_INDICES) hypre !cuda
382 args: -laplace_ksp_type cg -laplace_pc_type hypre -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 5 -tao_blmvm_mat_lmvm_J0_ksp_type cg -tao_blmvm_mat_lmvm_J0_ksp_rtol 1e-5 -tao_blmvm_mat_lmvm_J0_pc_type gamg -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-6 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
383 filter: sed -e "s/-nan/nan/g"
384
385 test:
386 suffix: guess_pod
387 requires: double triangle
388 args: -laplace_ksp_type cg -laplace_pc_type gamg -laplace_ksp_max_it 8 -laplace_pc_gamg_esteig_ksp_type cg -laplace_pc_gamg_esteig_ksp_max_it 5 -laplace_mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.0 -tao_blmvm_mat_lmvm_J0_ksp_type cg -tao_blmvm_mat_lmvm_J0_ksp_rtol 1e-5 -tao_blmvm_mat_lmvm_J0_pc_type gamg -tao_blmvm_mat_lmvm_J0_pc_gamg_esteig_ksp_type cg -tao_blmvm_mat_lmvm_J0_pc_gamg_esteig_ksp_max_it 3 -tao_monitor -petscspace_degree 1 -tao_converged_reason -dm_refine 0 -laplace_ksp_guess_type pod -tao_gatol 1e-6 -laplace_pc_gamg_aggressive_coarsening 0
389 filter: sed -e "s/-nan/nan/g" -e "s/-NaN/nan/g" -e "s/NaN/nan/g" -e "s/CONVERGED_RTOL iterations 9/CONVERGED_RTOL iterations 8/g" -e "s/CONVERGED_RTOL iterations 4/CONVERGED_RTOL iterations 3/g"
390
391 TEST*/
392