xref: /petsc/src/snes/tutorials/ex46.c (revision 64eb36536758ba5fde9eda0d4d7f84c5f43dcef0)
1 static char help[] = "Surface processes in geophysics.\n\n";
2 
3 #include <petscsnes.h>
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 
7 /*
8    User-defined application context - contains data needed by the
9    application-provided call-back routines, FormJacobianLocal() and
10    FormFunctionLocal().
11 */
/*
   Problem parameters, populated from the options database in main()
   and passed through to the local residual/Jacobian callbacks.
*/
typedef struct {
  PetscReal   D;  /* The diffusion coefficient (option -D, default 1.0) */
  PetscReal   K;  /* The advection coefficient (option -K, default 1.0) */
  PetscInt    m;  /* Exponent for A, i.e. A(z) = z^m (option -m, default 1) */
} AppCtx;
17 
18 /*
19    User-defined routines
20 */
21 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
22 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,AppCtx*);
23 
24 int main(int argc,char **argv)
25 {
26   SNES           snes;                         /* nonlinear solver */
27   AppCtx         user;                         /* user-defined work context */
28   PetscInt       its;                          /* iterations for convergence */
29   DM             da;
30 
31   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
32      Initialize program
33      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
34 
35   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
36 
37   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
38      Initialize problem parameters
39   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
40   PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");
41   user.D = 1.0;
42   PetscCall(PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, NULL));
43   user.K = 1.0;
44   PetscCall(PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, NULL));
45   user.m = 1;
46   PetscCall(PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, NULL));
47   PetscOptionsEnd();
48 
49   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
50      Create distributed array (DMDA) to manage parallel grid and vectors
51   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
52   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
53   PetscCall(DMSetFromOptions(da));
54   PetscCall(DMSetUp(da));
55   PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
56   PetscCall(DMSetApplicationContext(da,&user));
57   PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
58   PetscCall(SNESSetDM(snes, da));
59 
60   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
61      Set local function evaluation routine
62   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
63   PetscCall(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))FormFunctionLocal,&user));
64 
65   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
66      Customize solver; set runtime options
67    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
68   PetscCall(SNESSetFromOptions(snes));
69 
70   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
71      Solve nonlinear system
72      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
73   PetscCall(SNESSolve(snes,0,0));
74   PetscCall(SNESGetIterationNumber(snes,&its));
75 
76   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
77    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
78   PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its));
79 
80   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
81      Free work space.  All PETSc objects should be destroyed when they
82      are no longer needed.
83    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
84 
85   PetscCall(SNESDestroy(&snes));
86   PetscCall(DMDestroy(&da));
87 
88   PetscCall(PetscFinalize());
89   return 0;
90 }
91 
92 PetscScalar funcU(DMDACoor2d *coords)
93 {
94   return coords->x + coords->y;
95 }
96 
97 PetscScalar funcA(PetscScalar z, AppCtx *user)
98 {
99   PetscScalar v = 1.0;
100   PetscInt    i;
101 
102   for (i = 0; i < user->m; ++i) v *= z;
103   return v;
104 }
105 
106 PetscScalar funcADer(PetscScalar z, AppCtx *user)
107 {
108   PetscScalar v = 1.0;
109   PetscInt    i;
110 
111   for (i = 0; i < user->m-1; ++i) v *= z;
112   return (PetscScalar)user->m*v;
113 }
114 
/*
   FormFunctionLocal - Evaluates nonlinear function, F(x).

   Input Parameters:
     info - DMDA local grid information (global sizes mx,my; owned range xs,xm,ys,ym)
     x    - local (ghosted) array of solution values, indexed x[j][i]
     user - application context supplying D, K and the exponent m

   Output Parameter:
     f    - local array of residual values

   Interior stencil: f = D*(uxx+uyy) - (K*A(u)*|grad u| + funcU(x,y))*hx*hy;
   physical-boundary rows are the identity f = x (homogeneous Dirichlet form).
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  DM             coordDA;
  Vec            coordinates;
  DMDACoor2d     **coords;
  PetscScalar    u, ux, uy, uxx, uyy;
  PetscReal      D, K, hx, hy, hxdhy, hydhx;
  PetscInt       i,j;

  PetscFunctionBeginUser;
  D     = user->D;
  K     = user->K;
  /* uniform grid spacing on [0,1]x[0,1] with mx x my global points */
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  /* Global (non-ghosted) coordinates suffice: only owned points are read below */
  PetscCall(DMGetCoordinateDM(info->da, &coordDA));
  PetscCall(DMGetCoordinates(info->da, &coordinates));
  PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      /* Identity rows on the physical boundary of the global domain */
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) f[j][i] = x[j][i];
      else {
        u       = x[j][i];
        /* NOTE(review): ux,uy are one-sided (forward) differences while uxx,uyy
           are centered; presumably intentional upwinding of the gradient term —
           confirm against the Jacobian, which uses centered-looking halves */
        ux      = (x[j][i+1] - x[j][i])/hx;
        uy      = (x[j+1][i] - x[j][i])/hy;
        /* 5-point Laplacian pieces, pre-scaled so the row is multiplied by hx*hy */
        uxx     = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
        uyy     = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
        f[j][i] = D*(uxx + uyy) - (K*funcA(x[j][i], user)*PetscSqrtScalar(ux*ux + uy*uy) + funcU(&coords[j][i]))*hx*hy;
        /* Catch NaN/Inf immediately so the failure points at the offending entry */
        PetscCheck(!PetscIsInfOrNanScalar(f[j][i]),PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid residual: %g", (double)PetscRealPart(f[j][i]));
      }
    }
  }
  PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
  /* approximate flop count: ~11 flops per owned grid point */
  PetscCall(PetscLogFlops(11.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}
158 
159 /*
160    FormJacobianLocal - Evaluates Jacobian matrix.
161 */
162 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,AppCtx *user)
163 {
164   MatStencil     col[5], row;
165   PetscScalar    D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
166   PetscReal      normGradZ;
167   PetscInt       i, j,k;
168 
169   PetscFunctionBeginUser;
170   D     = user->D;
171   K     = user->K;
172   hx    = 1.0/(PetscReal)(info->mx-1);
173   hy    = 1.0/(PetscReal)(info->my-1);
174   hxdhy = hx/hy;
175   hydhx = hy/hx;
176 
177   /*
178      Compute entries for the locally owned part of the Jacobian.
179       - Currently, all PETSc parallel matrix formats are partitioned by
180         contiguous chunks of rows across the processors.
181       - Each processor needs to insert only elements that it owns
182         locally (but any non-local elements will be sent to the
183         appropriate processor during matrix assembly).
184       - Here, we set all entries for a particular row at once.
185       - We can set matrix entries either using either
186         MatSetValuesLocal() or MatSetValues(), as discussed above.
187   */
188   for (j=info->ys; j<info->ys+info->ym; j++) {
189     for (i=info->xs; i<info->xs+info->xm; i++) {
190       row.j = j; row.i = i;
191       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
192         /* boundary points */
193         v[0] = 1.0;
194         PetscCall(MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES));
195       } else {
196         /* interior grid points */
197         ux        = (x[j][i+1] - x[j][i])/hx;
198         uy        = (x[j+1][i] - x[j][i])/hy;
199         normGradZ = PetscRealPart(PetscSqrtScalar(ux*ux + uy*uy));
200         if (normGradZ < 1.0e-8) normGradZ = 1.0e-8;
201         A = funcA(x[j][i], user);
202 
203         v[0] = -D*hxdhy;                                                                          col[0].j = j - 1; col[0].i = i;
204         v[1] = -D*hydhx;                                                                          col[1].j = j;     col[1].i = i-1;
205         v[2] = D*2.0*(hydhx + hxdhy) + K*(funcADer(x[j][i], user)*normGradZ - A/normGradZ)*hx*hy; col[2].j = row.j; col[2].i = row.i;
206         v[3] = -D*hydhx + K*A*hx*hy/(2.0*normGradZ);                                              col[3].j = j;     col[3].i = i+1;
207         v[4] = -D*hxdhy + K*A*hx*hy/(2.0*normGradZ);                                              col[4].j = j + 1; col[4].i = i;
208         for (k = 0; k < 5; ++k) {
209           PetscCheck(!PetscIsInfOrNanScalar(v[k]),PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid residual: %g", (double)PetscRealPart(v[k]));
210         }
211         PetscCall(MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES));
212       }
213     }
214   }
215 
216   /*
217      Assemble matrix, using the 2-step process:
218        MatAssemblyBegin(), MatAssemblyEnd().
219   */
220   PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY));
221   PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY));
222   /*
223      Tell the matrix we will never add a new nonzero location to the
224      matrix. If we do, it will generate an error.
225   */
226   PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
227   PetscFunctionReturn(0);
228 }
229 
230 /*TEST
231 
232    test:
233       args: -snes_view -snes_monitor_short -da_refine 1 -pc_type mg -ksp_type fgmres -pc_mg_type full -mg_levels_ksp_chebyshev_esteig 0.5,1.1
234 
235    test:
236       suffix: ew_1
237       args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 1
238       requires: !single
239 
240    test:
241       suffix: ew_2
242       args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 2
243 
244    test:
245       suffix: ew_3
246       args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 3
247       requires: !single
248 
249    test:
250       suffix: fm_rise_2
251       args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 1 -snes_ngmres_restart_fm_rise
252       requires: !single
253 
254    test:
255       suffix: fm_rise_4
256       args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 2 -snes_ngmres_restart_fm_rise -snes_rtol 1.e-2 -snes_max_it 5
257 
258 TEST*/
259