xref: /petsc/src/tao/tutorials/ex3.c (revision 6a5217c03994f2d95bb2e6dbd8bed42381aeb015)
/* Usage summary printed by PETSc when the program is run with -help */
static char help[] = "Reduced formulation of the mother problem of PDE-constrained optimisation.\n";
2 
3 /*F
4   We solve the mother problem
5 
6   min 1/2 || y - y_d ||^2_2 + \alpha/2 || u ||^2_2
7 
8   subject to
9 
10           - \laplace y = u          on \Omega
11                      y = 0          on \Gamma
12 
13   where u is in L^2 and y is in H^1_0.
14 
15   We formulate the reduced problem solely in terms of the control
16   by using the state equation to express y in terms of u, and then
17   apply LMVM/BLMVM to the resulting reduced problem.
18 
19   Mesh independence is achieved by configuring the Riesz map for the control
20   space.
21 
  Example meshes where the Riesz map is crucial can be downloaded from
  http://people.maths.ox.ac.uk/~farrellp/meshes.tar.gz
24 
25   Contributed by: Patrick Farrell <patrick.farrell@maths.ox.ac.uk>
26 
27   Run with e.g.:
28   ./ex3 -laplace_ksp_type cg -laplace_pc_type hypre -mat_lmvm_ksp_type cg -mat_lmvm_pc_type gamg -laplace_ksp_monitor_true_residual -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-9 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
29 
30   and visualise in paraview with ../../../../petsc_gen_xdmf.py solution.h5.
31 
32   Toggle the Riesz map (-use_riesz 0) to see the difference setting the Riesz maps makes.
33 
34   TODO: broken for parallel runs
35 F*/
36 
37 #include <petsc.h>
38 #include <petscfe.h>
39 #include <petscviewerhdf5.h>
40 
/* Application context threaded through the TAO objective/gradient callback.
   All objects are created in CreateCtx() and released in DestroyCtx(),
   except dm, which is owned (and destroyed) by main(). */
typedef struct {
  DM  dm;           /* mesh and discretisation of the control space */
  Mat mass;         /* L^2 mass matrix; doubles as the Riesz map when -use_riesz */
  Vec data;         /* y_d, the observed state we seek to match */
  Vec state;        /* y, solution of the forward (Laplace) problem */
  Vec tmp1;         /* scratch vector, reused across stages of the callback */
  Vec tmp2;         /* second scratch vector */
  Vec adjoint;      /* adjoint state used to assemble the reduced gradient */
  Mat laplace;      /* stiffness matrix with Dirichlet rows replaced by identity */
  KSP ksp_laplace;  /* linear solver shared by the forward and adjoint solves */
  PetscInt  num_bc_dofs;  /* number of Dirichlet (boundary) dofs */
  PetscInt* bc_indices;   /* global indices of those dofs */
  PetscScalar* bc_values; /* imposed boundary values (all zero here) */
  PetscBool use_riesz;    /* toggle mesh-independent inner product (-use_riesz) */
} AppCtx;
56 
/*
  CreateMesh - Builds the computational mesh.

  Without -f, a DMPLEX is created and configured purely from the options
  database. With -f <file>, a FEniCS-generated HDF5 mesh is read by hand:
  the coordinates and topology are loaded as Vecs from the /Mesh/mesh group
  and converted into a cell list for DMPlexCreateFromCellListPetsc().

  Input:  comm - communicator for the mesh
  Output: user - use_riesz is initialised/read from options here
          dm   - the created mesh (caller destroys)
*/
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscBool      flg;
  char           filename[2048];
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  filename[0] = '\0';
  user->use_riesz = PETSC_TRUE;

  /* PetscOptionsBegin/End are scope-opening macros, hence the explicit
     ierr capture instead of wrapping them directly in PetscCall() */
  ierr = PetscOptionsBegin(comm, "", "Poisson mother problem options", "DMPLEX");PetscCall(ierr);
  PetscCall(PetscOptionsBool("-use_riesz", "Use the Riesz map to achieve mesh independence", "ex3.c", user->use_riesz, &user->use_riesz, NULL));
  PetscCall(PetscOptionsString("-f", "filename to read", "ex3.c", filename, filename, sizeof(filename), &flg));
  ierr = PetscOptionsEnd();PetscCall(ierr);

  if (!flg) {
    /* no file given: build the mesh from options (see DMSetFromOptions below) */
    PetscCall(DMCreate(comm, dm));
    PetscCall(DMSetType(*dm, DMPLEX));
  } else {
    /* TODO Eliminate this in favor of DMLoad() in new code */
#if defined(PETSC_HAVE_HDF5)
    const PetscInt vertices_per_cell = 3;  /* triangles only */
    PetscViewer    viewer;
    Vec            coordinates;
    Vec            topology;
    PetscInt       dim = 2, numCells;
    PetscInt       numVertices;
    PetscScalar*   coords;
    PetscScalar*   topo_f;
    PetscInt*      cells;
    PetscInt       j;
    DMLabel        label;

    /* Read in FEniCS HDF5 output */
    PetscCall(PetscViewerHDF5Open(comm, filename, FILE_MODE_READ, &viewer));

    /* create Vecs to read in the data from H5; block sizes must match the
       per-vertex/per-cell layout stored in the file */
    PetscCall(VecCreate(comm, &coordinates));
    PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
    PetscCall(VecSetBlockSize(coordinates, dim));
    PetscCall(VecCreate(comm, &topology));
    PetscCall(PetscObjectSetName((PetscObject)topology, "topology"));
    PetscCall(VecSetBlockSize(topology, vertices_per_cell));

    /* navigate to the right group (FEniCS layout) */
    PetscCall(PetscViewerHDF5PushGroup(viewer, "/Mesh/mesh"));

    /* Read the Vecs */
    PetscCall(VecLoad(coordinates, viewer));
    PetscCall(VecLoad(topology, viewer));

    /* derive cell/vertex counts from the flat array sizes */
    PetscCall(VecGetSize(topology, &numCells));
    numCells = numCells / vertices_per_cell;
    PetscCall(VecGetSize(coordinates, &numVertices));
    numVertices = numVertices / dim;

    PetscCall(VecGetArray(coordinates, &coords));
    PetscCall(VecGetArray(topology, &topo_f));
    /* the file stores connectivity as floating point; convert to integers */
    PetscCall(PetscMalloc1(numCells*vertices_per_cell, &cells));
    for (j = 0; j < numCells*vertices_per_cell; j++) {
      cells[j] = (PetscInt) topo_f[j];
    }

    /* Now create the DM */
    PetscCall(DMPlexCreateFromCellListPetsc(comm, dim, numCells, numVertices, vertices_per_cell, PETSC_TRUE, cells, dim, coords, dm));
    /* Check for flipped first cell: a negative Jacobian determinant means the
       vertex ordering is inverted, so reorient and re-check */
    {
      PetscReal v0[3], J[9], invJ[9], detJ;

      PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
      if (detJ < 0) {
        PetscCall(DMPlexOrientPoint(*dm, 0, -1));
        PetscCall(DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ));
        PetscCheck(detJ >= 0,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Something is wrong");
      }
    }
    PetscCall(DMPlexOrient(*dm));
    /* mark the whole boundary with id 1; CreateCtx uses this "marker" label
       to impose the homogeneous Dirichlet condition */
    PetscCall(DMCreateLabel(*dm, "marker"));
    PetscCall(DMGetLabel(*dm, "marker", &label));
    PetscCall(DMPlexMarkBoundaryFaces(*dm, 1, label));
    PetscCall(DMPlexLabelComplete(*dm, label));

    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(VecRestoreArray(coordinates, &coords));
    PetscCall(VecRestoreArray(topology, &topo_f));
    PetscCall(PetscFree(cells));
    PetscCall(VecDestroy(&coordinates));
    PetscCall(VecDestroy(&topology));
#else
    SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Reconfigure PETSc with --download-hdf5");
#endif
  }
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
155 
/* Pointwise g0 Jacobian kernel for the mass matrix (the u*v term of
   inner(u, v)*dx). The signature is fixed by PetscDSSetJacobian; only
   g0 is written here, all other arguments are unused. */
void mass_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux,
           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
           PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[])
{
  g0[0] = 1.0;
}
163 
/* Pointwise g3 Jacobian kernel for the Laplacian (the grad(u).grad(v) term):
   sets g3 to the dim x dim identity, giving the constant-coefficient
   stiffness matrix. Signature fixed by PetscDSSetJacobian. */
void laplace_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux,
           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
           PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
{
  PetscInt d;
  for (d = 0; d < dim; ++d) g3[d*dim+d] = 1.0;
}
172 
173 /* data we seek to match */
174 PetscErrorCode data_kernel(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *y, void *ctx)
175 {
176   *y = 1.0/(2*PETSC_PI*PETSC_PI) * PetscSinReal(PETSC_PI*x[0]) * PetscSinReal(PETSC_PI*x[1]);
177   /* the associated control is sin(pi*x[0])*sin(pi*x[1]) */
178   return 0;
179 }
180 PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
181 {
182   *u = 0.0;
183   return 0;
184 }
185 
/*
  CreateCtx - Assembles every object the reduced-space optimisation needs:
  the target data y_d, the L^2 mass matrix, the Dirichlet-constrained
  Laplace operator with its KSP, the list of boundary dofs, and the
  work vectors used by ReducedFunctionGradient().

  Input:  dm   - the mesh from CreateMesh (stored in user->dm, not owned)
  Output: user - fully populated context; release with DestroyCtx()
*/
PetscErrorCode CreateCtx(DM dm, AppCtx* user)
{

  DM             dm_mass;
  DM             dm_laplace;
  PetscDS        prob_mass;
  PetscDS        prob_laplace;
  PetscFE        fe;
  DMLabel        label;
  PetscSection   section;
  PetscInt       n, k, p, d;
  PetscInt       dof, off;
  IS             is;
  const PetscInt* points;
  const PetscInt dim = 2;
  const PetscInt id  = 1; /* boundary marker value set in CreateMesh */
  PetscErrorCode (**wtf)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);

  PetscFunctionBeginUser;

  /* make the data we seek to match: a single scalar FE field (simplex,
     quadrature order 4, degree from -petscspace_degree) */
  PetscCall(PetscFECreateDefault(PetscObjectComm((PetscObject) dm), dim, 1, PETSC_TRUE, NULL, 4, &fe));

  PetscCall(DMSetField(dm, 0, NULL, (PetscObject) fe));
  PetscCall(DMCreateDS(dm));
  PetscCall(DMCreateGlobalVector(dm, &user->data));

  /* ugh, this is hideous */
  /* y_d = interpolate(Expression("sin(x[0]) + .."), V) --
     DMProjectFunction wants an array of function pointers, one per field */
  PetscCall(PetscMalloc(1 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &wtf));
  wtf[0] = data_kernel;
  PetscCall(DMProjectFunction(dm, 0.0, wtf, NULL, INSERT_VALUES, user->data));
  PetscCall(PetscFree(wtf));

  /* assemble(inner(u, v)*dx), almost: build the mass matrix on a clone of
     the mesh so its DS does not interfere with the Laplace problem below */
  PetscCall(DMClone(dm, &dm_mass));
  PetscCall(DMCopyDisc(dm, dm_mass));
  PetscCall(DMSetNumFields(dm_mass, 1));
  PetscCall(DMPlexCopyCoordinates(dm, dm_mass)); /* why do I have to do this separately? */
  PetscCall(DMGetDS(dm_mass, &prob_mass));
  PetscCall(PetscDSSetJacobian(prob_mass, 0, 0, mass_kernel, NULL, NULL, NULL));
  PetscCall(PetscDSSetDiscretization(prob_mass, 0, (PetscObject) fe));
  PetscCall(DMCreateMatrix(dm_mass, &user->mass));
  /* the "Jacobian" of a linear form is the matrix itself; user->data is a
     dummy argument since the kernels ignore u */
  PetscCall(DMPlexSNESComputeJacobianFEM(dm_mass, user->data, user->mass, user->mass, NULL));
  PetscCall(MatSetOption(user->mass, MAT_SYMMETRIC, PETSC_TRUE));
  PetscCall(DMDestroy(&dm_mass));

  /* inner(grad(u), grad(v))*dx with homogeneous Dirichlet boundary conditions */
  PetscCall(DMClone(dm, &dm_laplace));
  PetscCall(DMCopyDisc(dm, dm_laplace));
  PetscCall(DMSetNumFields(dm_laplace, 1));
  PetscCall(DMPlexCopyCoordinates(dm, dm_laplace));
  PetscCall(DMGetDS(dm_laplace, &prob_laplace));
  PetscCall(PetscDSSetJacobian(prob_laplace, 0, 0, NULL, NULL, NULL, laplace_kernel));
  PetscCall(PetscDSSetDiscretization(prob_laplace, 0, (PetscObject) fe));
  PetscCall(DMCreateMatrix(dm_laplace, &user->laplace));
  PetscCall(DMPlexSNESComputeJacobianFEM(dm_laplace, user->data, user->laplace, user->laplace, NULL));

  /* Code from Matt to get the indices associated with the boundary dofs:
     walk the "marker" stratum and collect every dof the local section
     assigns to a marked point */
  PetscCall(DMGetLabel(dm_laplace, "marker", &label));
  PetscCall(DMAddBoundary(dm_laplace, DM_BC_ESSENTIAL, "wall", label, 1, &id, 0, 0, NULL, (void (*)(void)) zero, NULL, NULL, NULL));
  PetscCall(DMGetLocalSection(dm_laplace, &section));
  PetscCall(DMLabelGetStratumSize(label, 1, &n));
  PetscCall(DMLabelGetStratumIS(label, 1, &is));
  PetscCall(ISGetIndices(is, &points));
  user->num_bc_dofs = 0;
  /* first pass: count, so we can allocate bc_indices exactly */
  for (p = 0; p < n; ++p) {
    PetscCall(PetscSectionGetDof(section, points[p], &dof));
    user->num_bc_dofs += dof;
  }
  PetscCall(PetscMalloc1(user->num_bc_dofs, &user->bc_indices));
  /* second pass: record each boundary dof's offset */
  for (p = 0, k = 0; p < n; ++p) {
    PetscCall(PetscSectionGetDof(section, points[p], &dof));
    PetscCall(PetscSectionGetOffset(section, points[p], &off));
    for (d = 0; d < dof; ++d) user->bc_indices[k++] = off+d;
  }
  PetscCall(ISRestoreIndices(is, &points));
  PetscCall(ISDestroy(&is));
  PetscCall(DMDestroy(&dm_laplace));

  /* This is how I handle boundary conditions. I can't figure out how to get
     plex to play with the way I want to impose the BCs. This loses symmetry,
     but not in a disastrous way. If someone can improve it, please do! */
  PetscCall(MatZeroRows(user->laplace, user->num_bc_dofs, user->bc_indices, 1.0, NULL, NULL));
  /* bc_values is all zeros (homogeneous BC), hence calloc */
  PetscCall(PetscCalloc1(user->num_bc_dofs, &user->bc_values));

  /* also create the KSP for solving the Laplace system */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &user->ksp_laplace));
  PetscCall(KSPSetOperators(user->ksp_laplace, user->laplace, user->laplace));
  PetscCall(KSPSetOptionsPrefix(user->ksp_laplace, "laplace_"));
  PetscCall(KSPSetFromOptions(user->ksp_laplace));

  /* A bit of setting up the user context */
  user->dm = dm;
  PetscCall(VecDuplicate(user->data, &user->state));
  PetscCall(VecDuplicate(user->data, &user->adjoint));
  PetscCall(VecDuplicate(user->data, &user->tmp1));
  PetscCall(VecDuplicate(user->data, &user->tmp2));

  PetscCall(PetscFEDestroy(&fe));

  PetscFunctionReturn(0);
}
289 
/*
  DestroyCtx - Releases every object created in CreateCtx().
  Note user->dm is NOT destroyed here: main() owns it and calls DMDestroy.
*/
PetscErrorCode DestroyCtx(AppCtx* user)
{
  PetscFunctionBeginUser;

  PetscCall(MatDestroy(&user->mass));
  PetscCall(MatDestroy(&user->laplace));
  PetscCall(KSPDestroy(&user->ksp_laplace));
  PetscCall(VecDestroy(&user->data));
  PetscCall(VecDestroy(&user->state));
  PetscCall(VecDestroy(&user->adjoint));
  PetscCall(VecDestroy(&user->tmp1));
  PetscCall(VecDestroy(&user->tmp2));
  PetscCall(PetscFree(user->bc_indices));
  PetscCall(PetscFree(user->bc_values));

  PetscFunctionReturn(0);
}
307 
/*
  ReducedFunctionGradient - TAO callback evaluating the reduced objective

      J(u) = 1/2 ||y(u) - y_d||^2_{L^2} + alpha/2 ||u||^2_{L^2}

  and its gradient g = alpha*M*u + M*lambda, where M is the mass matrix,
  y solves the (BC-constrained) Laplace system A y = M u, and lambda is
  the adjoint state solving A lambda = M (y - y_d). Both solves reuse
  user->ksp_laplace; since A had its Dirichlet rows replaced by identity
  rows (MatZeroRows in CreateCtx), the BC dofs of each right-hand side are
  overwritten with the (zero) boundary values before solving.

  tmp1 and tmp2 are scratch; their contents do not survive across stages.
*/
PetscErrorCode ReducedFunctionGradient(Tao tao, Vec u, PetscReal* func, Vec g, void* userv)
{
  AppCtx* user = (AppCtx*) userv;
  const PetscReal alpha = 1.0e-6; /* regularisation parameter */
  PetscReal inner;

  PetscFunctionBeginUser;

  PetscCall(MatMult(user->mass, u, user->tmp1));          /* tmp1 = M u */
  PetscCall(VecDot(u, user->tmp1, &inner));               /* regularisation contribution to */
  *func = alpha * 0.5 * inner;                                      /* the functional                 */

  PetscCall(VecSet(g, 0.0));
  PetscCall(VecAXPY(g, alpha, user->tmp1));               /* regularisation contribution to the gradient */

  /* Now compute the forward state: impose the BC values on the rhs, then
     solve A y = M u (the identity rows of A pick up the BC values). */
  PetscCall(VecSetValues(user->tmp1, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(user->tmp1));
  PetscCall(VecAssemblyEnd(user->tmp1));
  PetscCall(KSPSolve(user->ksp_laplace, user->tmp1, user->state)); /* forward solve */

  /* Now compute the adjoint state also. */
  PetscCall(VecCopy(user->state, user->tmp1));
  PetscCall(VecAXPY(user->tmp1, -1.0, user->data));       /* tmp1 = y - y_d */
  PetscCall(MatMult(user->mass, user->tmp1, user->tmp2)); /* tmp2 = M (y - y_d) */
  PetscCall(VecDot(user->tmp1, user->tmp2, &inner));      /* misfit contribution to */
  *func += 0.5 * inner;                                             /* the functional         */

  /* adjoint rhs gets the same BC treatment before solving A lambda = tmp2 */
  PetscCall(VecSetValues(user->tmp2, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(user->tmp2));
  PetscCall(VecAssemblyEnd(user->tmp2));
  PetscCall(KSPSolve(user->ksp_laplace, user->tmp2, user->adjoint)); /* adjoint solve */

  /* And bring it home with the gradient. */
  PetscCall(MatMult(user->mass, user->adjoint, user->tmp1));
  PetscCall(VecAXPY(g, 1.0, user->tmp1));                 /* adjoint contribution to the gradient */

  PetscFunctionReturn(0);
}
347 
/* Driver: build the mesh and context, then minimise the reduced objective
   with TAO's bound-constrained limited-memory quasi-Newton method (BLMVM),
   subject to the simple bounds 0 <= u <= 0.8. */
int main(int argc, char **argv)
{
  DM             dm;
  Tao            tao;
  Vec            u, lb, ub;
  AppCtx         user;

  PetscCall(PetscInitialize(&argc, &argv, NULL,help));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(CreateCtx(dm, &user));

  /* control iterate and its bound vectors live in the same space as y_d */
  PetscCall(DMCreateGlobalVector(dm, &u));
  PetscCall(VecSet(u, 0.0));
  PetscCall(VecDuplicate(u, &lb));
  PetscCall(VecDuplicate(u, &ub));
  PetscCall(VecSet(lb, 0.0)); /* satisfied at the minimum anyway */
  PetscCall(VecSet(ub, 0.8)); /* a nontrivial upper bound */

  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetSolution(tao, u));
  PetscCall(TaoSetObjectiveAndGradient(tao,NULL, ReducedFunctionGradient, &user));
  PetscCall(TaoSetVariableBounds(tao, lb, ub));
  PetscCall(TaoSetType(tao, TAOBLMVM));
  PetscCall(TaoSetFromOptions(tao));

  /* using the mass matrix as the initial Hessian approximation and as the
     gradient-norm inner product makes the iteration counts mesh independent */
  if (user.use_riesz) {
    PetscCall(TaoLMVMSetH0(tao, user.mass));       /* crucial for mesh independence */
    PetscCall(TaoSetGradientNorm(tao, user.mass));
  }

  PetscCall(TaoSolve(tao));

  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(VecViewFromOptions(u, NULL, "-sol_view"));

  PetscCall(TaoDestroy(&tao));
  PetscCall(DMDestroy(&dm));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&lb));
  PetscCall(VecDestroy(&ub));
  PetscCall(DestroyCtx(&user));
  PetscCall(PetscFinalize());
  return 0;
}
392 
393 /*TEST
394 
395     build:
396       requires: !complex !single
397 
398     test:
399       requires: hdf5 double datafilespath !defined(PETSC_USE_64BIT_INDICES) hypre
400       args: -laplace_ksp_type cg -laplace_pc_type hypre -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 5 -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-6 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
401       filter: sed -e "s/-nan/nan/g"
402 
403     test:
404       suffix: guess_pod
405       requires: double triangle
406       args: -laplace_ksp_type cg -laplace_pc_type gamg -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 8 -laplace_pc_gamg_esteig_ksp_type cg -laplace_pc_gamg_esteig_ksp_max_it 5 -laplace_mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.0 -laplace_ksp_converged_reason -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -mat_lmvm_pc_gamg_esteig_ksp_type cg -mat_lmvm_pc_gamg_esteig_ksp_max_it 3 -tao_monitor -petscspace_degree 1 -tao_converged_reason -dm_refine 0 -laplace_ksp_guess_type pod -tao_gatol 1e-6
407       filter: sed -e "s/-nan/nan/g" -e "s/-NaN/nan/g" -e "s/NaN/nan/g" -e "s/CONVERGED_RTOL iterations 9/CONVERGED_RTOL iterations 8/g" -e "s/CONVERGED_RTOL iterations 4/CONVERGED_RTOL iterations 3/g"
408 
409 TEST*/
410