xref: /petsc/src/snes/tutorials/ex46.c (revision 750b007cd8d816cecd9de99077bb0a703b4cf61a)
1 static char help[] = "Surface processes in geophysics.\n\n";
2 
3 #include <petscsnes.h>
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
7 /*
8    User-defined application context - contains data needed by the
9    application-provided call-back routines, FormJacobianLocal() and
10    FormFunctionLocal().
11 */
/* Problem parameters shared with the residual/Jacobian callbacks via the DM's application context. */
typedef struct {
  PetscReal D; /* The diffusion coefficient */
  PetscReal K; /* The advection coefficient */
  PetscInt  m; /* Exponent for A */
} AppCtx;
17 
18 /*
19    User-defined routines
20 */
21 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo *, PetscScalar **, PetscScalar **, AppCtx *);
22 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo *, PetscScalar **, Mat, AppCtx *);
23 
24 int main(int argc, char **argv) {
25   SNES     snes; /* nonlinear solver */
26   AppCtx   user; /* user-defined work context */
27   PetscInt its;  /* iterations for convergence */
28   DM       da;
29 
30   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
31      Initialize program
32      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
33 
34   PetscFunctionBeginUser;
35   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
36 
37   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
38      Initialize problem parameters
39   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
40   PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");
41   user.D = 1.0;
42   PetscCall(PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, NULL));
43   user.K = 1.0;
44   PetscCall(PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, NULL));
45   user.m = 1;
46   PetscCall(PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, NULL));
47   PetscOptionsEnd();
48 
49   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
50      Create distributed array (DMDA) to manage parallel grid and vectors
51   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
52   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
53   PetscCall(DMSetFromOptions(da));
54   PetscCall(DMSetUp(da));
55   PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
56   PetscCall(DMSetApplicationContext(da, &user));
57   PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
58   PetscCall(SNESSetDM(snes, da));
59 
60   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
61      Set local function evaluation routine
62   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
63   PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (PetscErrorCode(*)(DMDALocalInfo *, void *, void *, void *))FormFunctionLocal, &user));
64 
65   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
66      Customize solver; set runtime options
67    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
68   PetscCall(SNESSetFromOptions(snes));
69 
70   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
71      Solve nonlinear system
72      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
73   PetscCall(SNESSolve(snes, 0, 0));
74   PetscCall(SNESGetIterationNumber(snes, &its));
75 
76   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
77    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
78   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of SNES iterations = %" PetscInt_FMT "\n", its));
79 
80   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
81      Free work space.  All PETSc objects should be destroyed when they
82      are no longer needed.
83    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
84 
85   PetscCall(SNESDestroy(&snes));
86   PetscCall(DMDestroy(&da));
87 
88   PetscCall(PetscFinalize());
89   return 0;
90 }
91 
92 PetscScalar funcU(DMDACoor2d *coords) {
93   return coords->x + coords->y;
94 }
95 
96 PetscScalar funcA(PetscScalar z, AppCtx *user) {
97   PetscScalar v = 1.0;
98   PetscInt    i;
99 
100   for (i = 0; i < user->m; ++i) v *= z;
101   return v;
102 }
103 
104 PetscScalar funcADer(PetscScalar z, AppCtx *user) {
105   PetscScalar v = 1.0;
106   PetscInt    i;
107 
108   for (i = 0; i < user->m - 1; ++i) v *= z;
109   return (PetscScalar)user->m * v;
110 }
111 
112 /*
113    FormFunctionLocal - Evaluates nonlinear function, F(x).
114 */
/*
   FormFunctionLocal - Evaluates nonlinear function, F(x).

   Input Parameters:
     info - DMDA local grid information (owned range, global sizes)
     x    - local ghosted array of the current iterate
     user - application context carrying D, K and m

   Output Parameter:
     f    - residual array over the locally owned grid points
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, PetscScalar **x, PetscScalar **f, AppCtx *user) {
  DM           coordDA;
  Vec          coordinates;
  DMDACoor2d **coords;
  PetscScalar  u, ux, uy, uxx, uyy;
  PetscReal    D, K, hx, hy, hxdhy, hydhx;
  PetscInt     i, j;

  PetscFunctionBeginUser;
  D     = user->D;
  K     = user->K;
  /* Uniform grid spacings on the unit square; mx,my are global point counts */
  hx    = 1.0 / (PetscReal)(info->mx - 1);
  hy    = 1.0 / (PetscReal)(info->my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;
  /*
     Compute function over the locally owned part of the grid
  */
  PetscCall(DMGetCoordinateDM(info->da, &coordDA));
  PetscCall(DMGetCoordinates(info->da, &coordinates));
  PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      /* Dirichlet boundary: residual is simply the unknown itself */
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) f[j][i] = x[j][i];
      else {
        u       = x[j][i];
        /* One-sided (forward) differences for the gradient components */
        ux      = (x[j][i + 1] - x[j][i]) / hx;
        uy      = (x[j + 1][i] - x[j][i]) / hy;
        /* Scaled second differences (5-point Laplacian contributions) */
        uxx     = (2.0 * u - x[j][i - 1] - x[j][i + 1]) * hydhx;
        uyy     = (2.0 * u - x[j - 1][i] - x[j + 1][i]) * hxdhy;
        /* Residual: diffusion minus (advection + source), scaled by the cell area hx*hy */
        f[j][i] = D * (uxx + uyy) - (K * funcA(x[j][i], user) * PetscSqrtScalar(ux * ux + uy * uy) + funcU(&coords[j][i])) * hx * hy;
        /* Fail fast on NaN/Inf so a bad iterate is caught at the point of origin */
        PetscCheck(!PetscIsInfOrNanScalar(f[j][i]), PETSC_COMM_SELF, PETSC_ERR_FP, "Invalid residual: %g", (double)PetscRealPart(f[j][i]));
      }
    }
  }
  PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
  PetscCall(PetscLogFlops(11.0 * info->ym * info->xm));
  PetscFunctionReturn(0);
}
154 
155 /*
156    FormJacobianLocal - Evaluates Jacobian matrix.
157 */
/*
   FormJacobianLocal - Evaluates Jacobian matrix.

   Input Parameters:
     info - DMDA local grid information (owned range, global sizes)
     x    - local ghosted array of the current iterate
     user - application context carrying D, K and m

   Output Parameter:
     jac  - assembled Jacobian (5-point star stencil per interior row)
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info, PetscScalar **x, Mat jac, AppCtx *user) {
  MatStencil  col[5], row;
  PetscScalar D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
  PetscReal   normGradZ;
  PetscInt    i, j, k;

  PetscFunctionBeginUser;
  D     = user->D;
  K     = user->K;
  /* Same grid spacings as FormFunctionLocal so the Jacobian matches the residual */
  hx    = 1.0 / (PetscReal)(info->mx - 1);
  hy    = 1.0 / (PetscReal)(info->my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues(), as discussed above.
  */
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      row.j = j;
      row.i = i;
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
        /* boundary points: identity row, matching f[j][i] = x[j][i] in the residual */
        v[0] = 1.0;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
      } else {
        /* interior grid points */
        ux        = (x[j][i + 1] - x[j][i]) / hx;
        uy        = (x[j + 1][i] - x[j][i]) / hy;
        normGradZ = PetscRealPart(PetscSqrtScalar(ux * ux + uy * uy));
        /* Clamp the gradient norm away from zero to avoid division by zero below */
        if (normGradZ < 1.0e-8) normGradZ = 1.0e-8;
        A = funcA(x[j][i], user);

        /* South neighbor (j-1, i) */
        v[0]     = -D * hxdhy;
        col[0].j = j - 1;
        col[0].i = i;
        /* West neighbor (j, i-1) */
        v[1]     = -D * hydhx;
        col[1].j = j;
        col[1].i = i - 1;
        /* Diagonal: diffusion stencil center plus the advection derivative terms */
        v[2]     = D * 2.0 * (hydhx + hxdhy) + K * (funcADer(x[j][i], user) * normGradZ - A / normGradZ) * hx * hy;
        col[2].j = row.j;
        col[2].i = row.i;
        /* East neighbor (j, i+1) */
        v[3]     = -D * hydhx + K * A * hx * hy / (2.0 * normGradZ);
        col[3].j = j;
        col[3].i = i + 1;
        /* North neighbor (j+1, i) */
        v[4]     = -D * hxdhy + K * A * hx * hy / (2.0 * normGradZ);
        col[4].j = j + 1;
        col[4].i = i;
        /* Reject NaN/Inf entries before they are inserted into the matrix */
        for (k = 0; k < 5; ++k) PetscCheck(!PetscIsInfOrNanScalar(v[k]), PETSC_COMM_SELF, PETSC_ERR_FP, "Invalid residual: %g", (double)PetscRealPart(v[k]));
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(0);
}
233 
234 /*TEST
235 
236    test:
237       args: -snes_view -snes_monitor_short -da_refine 1 -pc_type mg -ksp_type fgmres -pc_mg_type full -mg_levels_ksp_chebyshev_esteig 0.5,1.1
238 
239    test:
240       suffix: ew_1
241       args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 1
242       requires: !single
243 
244    test:
245       suffix: ew_2
246       args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 2
247 
248    test:
249       suffix: ew_3
250       args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 3
251       requires: !single
252 
253    test:
254       suffix: fm_rise_2
255       args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 1 -snes_ngmres_restart_fm_rise
256       requires: !single
257 
258    test:
259       suffix: fm_rise_4
260       args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 2 -snes_ngmres_restart_fm_rise -snes_rtol 1.e-2 -snes_max_it 5
261 
262 TEST*/
263