xref: /petsc/src/snes/tutorials/ex35.c (revision 5f80ce2ab25dff0f4601e710601cbbcecf323266)
1 static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n";
2 
3 /*T
4    Concepts: SNES^parallel Bratu example
5    Concepts: DMDA^using distributed arrays;
6    Concepts: IS coloring types;
7    Processors: n
8 T*/
9 
10 /*
11 
12     The linear and nonlinear versions of these should give almost identical results on this problem
13 
14     Richardson
15       Nonlinear:
16         -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor
17 
18       Linear:
19         -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12  -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info
20 
21     GMRES
22       Nonlinear:
23        -snes_rtol 1.e-12 -snes_monitor  -snes_type ngmres
24 
25       Linear:
26        -snes_rtol 1.e-12 -snes_monitor  -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none
27 
28     CG
29        Nonlinear:
30             -snes_rtol 1.e-12 -snes_monitor  -snes_type ncg -snes_linesearch_monitor
31 
32        Linear:
33              -snes_rtol 1.e-12 -snes_monitor  -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none
34 
35     Multigrid
36        Linear:
37           1 level:
38             -snes_rtol 1.e-12 -snes_monitor  -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor
39             -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12  -ksp_monitor_true_residual
40 
41           n levels:
42             -da_refine n
43 
44        Nonlinear:
45          1 level:
46            -snes_rtol 1.e-12 -snes_monitor  -snes_type fas -fas_levels_snes_monitor
47 
48           n levels:
49             -da_refine n  -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly
50 
51 */
52 
53 /*
54    Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
55    Include "petscsnes.h" so that we can use SNES solvers.
56 */
57 #include <petscdm.h>
58 #include <petscdmda.h>
59 #include <petscsnes.h>
60 
61 /*
62    User-defined routines
63 */
64 extern PetscErrorCode FormMatrix(DM,Mat);
65 extern PetscErrorCode MyComputeFunction(SNES,Vec,Vec,void*);
66 extern PetscErrorCode MyComputeJacobian(SNES,Vec,Mat,Mat,void*);
67 extern PetscErrorCode NonlinearGS(SNES,Vec);
68 
69 int main(int argc,char **argv)
70 {
71   SNES           snes;                                 /* nonlinear solver */
72   SNES           psnes;                                /* nonlinear Gauss-Seidel approximate solver */
73   Vec            x,b;                                  /* solution vector */
74   PetscInt       its;                                  /* iterations for convergence */
75   PetscErrorCode ierr;
76   DM             da;
77   PetscBool      use_ngs_as_npc = PETSC_FALSE;                /* use the nonlinear Gauss-Seidel approximate solver */
78 
79   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
80      Initialize program
81      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
82 
83   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
84 
85   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
86      Create nonlinear solver context
87      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
88   CHKERRQ(SNESCreate(PETSC_COMM_WORLD,&snes));
89 
90   CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-use_ngs_as_npc",&use_ngs_as_npc,0));
91 
92   if (use_ngs_as_npc) {
93     CHKERRQ(SNESGetNPC(snes,&psnes));
94     CHKERRQ(SNESSetType(psnes,SNESSHELL));
95     CHKERRQ(SNESShellSetSolve(psnes,NonlinearGS));
96   }
97 
98   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
99      Create distributed array (DMDA) to manage parallel grid and vectors
100   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
101   CHKERRQ(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
102   CHKERRQ(DMSetFromOptions(da));
103   CHKERRQ(DMSetUp(da));
104   CHKERRQ(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
105   CHKERRQ(SNESSetDM(snes,da));
106   if (use_ngs_as_npc) {
107     CHKERRQ(SNESShellSetContext(psnes,da));
108   }
109   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
110      Extract global vectors from DMDA; then duplicate for remaining
111      vectors that are the same types
112    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
113   CHKERRQ(DMCreateGlobalVector(da,&x));
114   CHKERRQ(DMCreateGlobalVector(da,&b));
115   CHKERRQ(VecSet(b,1.0));
116 
117   CHKERRQ(SNESSetFunction(snes,NULL,MyComputeFunction,NULL));
118   CHKERRQ(SNESSetJacobian(snes,NULL,NULL,MyComputeJacobian,NULL));
119 
120   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121      Customize nonlinear solver; set runtime options
122    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123   CHKERRQ(SNESSetFromOptions(snes));
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126      Solve nonlinear system
127      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   CHKERRQ(SNESSolve(snes,b,x));
129   CHKERRQ(SNESGetIterationNumber(snes,&its));
130 
131   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
132    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
133 
134   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135      Free work space.  All PETSc objects should be destroyed when they
136      are no longer needed.
137    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
138   CHKERRQ(VecDestroy(&x));
139   CHKERRQ(VecDestroy(&b));
140   CHKERRQ(SNESDestroy(&snes));
141   CHKERRQ(DMDestroy(&da));
142   ierr = PetscFinalize();
143   return ierr;
144 }
145 
146 /* ------------------------------------------------------------------- */
/*
   MyComputeFunction - Residual evaluation for the (linear) problem: F = J*x.

   The operator J is built lazily on first call and cached in the DM's
   application context so later residual evaluations reuse it.
*/
PetscErrorCode MyComputeFunction(SNES snes,Vec x,Vec F,void *ctx)
{
  Mat            J;
  DM             dm;

  PetscFunctionBeginUser;
  CHKERRQ(SNESGetDM(snes,&dm));
  CHKERRQ(DMGetApplicationContext(dm,&J));
  if (!J) {
    /* first call: create the matrix from the DM and fill it once */
    CHKERRQ(DMSetMatType(dm,MATAIJ));
    CHKERRQ(DMCreateMatrix(dm,&J));
    /* NOTE(review): presumably detaches the matrix from the DM to avoid a
       circular reference between Mat and DM -- confirm against MatSetDM docs */
    CHKERRQ(MatSetDM(J, NULL));
    CHKERRQ(FormMatrix(dm,J));
    /* cache J on the DM; the destroy callback makes the DM free it at teardown */
    CHKERRQ(DMSetApplicationContext(dm,J));
    CHKERRQ(DMSetApplicationContextDestroy(dm,(PetscErrorCode (*)(void**))MatDestroy));
  }
  CHKERRQ(MatMult(J,x,F));
  PetscFunctionReturn(0);
}
166 
167 PetscErrorCode MyComputeJacobian(SNES snes,Vec x,Mat J,Mat Jp,void *ctx)
168 {
169   DM             dm;
170 
171   PetscFunctionBeginUser;
172   CHKERRQ(SNESGetDM(snes,&dm));
173   CHKERRQ(FormMatrix(dm,Jp));
174   PetscFunctionReturn(0);
175 }
176 
/*
   FormMatrix - Assembles the 5-point finite-difference Laplacian on the
   DMDA grid, with Dirichlet rows/columns symmetrically eliminated.

   Input:   da  - the distributed array describing the grid
   Output:  jac - the assembled matrix (preallocated by the caller)
*/
PetscErrorCode FormMatrix(DM da,Mat jac)
{
  PetscInt       i,j,nrows = 0;
  MatStencil     col[5],row,*rows;      /* rows[] collects boundary rows for later elimination */
  PetscScalar    v[5],hx,hy,hxdhy,hydhx;
  DMDALocalInfo  info;

  PetscFunctionBeginUser;
  CHKERRQ(DMDAGetLocalInfo(da,&info));
  hx    = 1.0/(PetscReal)(info.mx-1);
  hy    = 1.0/(PetscReal)(info.my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  /* worst case: every locally-owned point is a boundary point */
  CHKERRQ(PetscMalloc1(info.ym*info.xm,&rows));
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues(), as discussed above.
  */
  for (j=info.ys; j<info.ys+info.ym; j++) {
    for (i=info.xs; i<info.xs+info.xm; i++) {
      row.j = j; row.i = i;
      /* boundary points: insert a placeholder diagonal and remember the row */
      if (i == 0 || j == 0 || i == info.mx-1 || j == info.my-1) {
        v[0]            = 2.0*(hydhx + hxdhy);
        CHKERRQ(MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES));
        rows[nrows].i   = i;
        rows[nrows++].j = j;
      } else {
        /* interior grid points: standard 5-point stencil */
        v[0] = -hxdhy;                                           col[0].j = j - 1; col[0].i = i;
        v[1] = -hydhx;                                           col[1].j = j;     col[1].i = i-1;
        v[2] = 2.0*(hydhx + hxdhy);                              col[2].j = row.j; col[2].i = row.i;
        v[3] = -hydhx;                                           col[3].j = j;     col[3].i = i+1;
        v[4] = -hxdhy;                                           col[4].j = j + 1; col[4].i = i;
        CHKERRQ(MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
     Assembly must complete before rows/columns can be zeroed below.
  */
  CHKERRQ(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY));
  /* symmetric elimination of the Dirichlet rows/columns, setting the diagonal */
  CHKERRQ(MatZeroRowsColumnsStencil(jac,nrows,rows,2.0*(hydhx + hxdhy),NULL,NULL));
  CHKERRQ(PetscFree(rows));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  CHKERRQ(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  PetscFunctionReturn(0);
}
239 
240 /* ------------------------------------------------------------------- */
241 /*
242       Applies some sweeps on nonlinear Gauss-Seidel on each process
243 
244  */
245 PetscErrorCode NonlinearGS(SNES snes,Vec X)
246 {
247   PetscInt       i,j,Mx,My,xs,ys,xm,ym,its,l;
248   PetscReal      hx,hy,hxdhy,hydhx;
249   PetscScalar    **x,F,J,u,uxx,uyy;
250   DM             da;
251   Vec            localX;
252 
253   PetscFunctionBeginUser;
254   CHKERRQ(SNESGetTolerances(snes,NULL,NULL,NULL,&its,NULL));
255   CHKERRQ(SNESShellGetContext(snes,&da));
256 
257   CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
258 
259   hx    = 1.0/(PetscReal)(Mx-1);
260   hy    = 1.0/(PetscReal)(My-1);
261   hxdhy = hx/hy;
262   hydhx = hy/hx;
263 
264   CHKERRQ(DMGetLocalVector(da,&localX));
265 
266   for (l=0; l<its; l++) {
267 
268     CHKERRQ(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
269     CHKERRQ(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
270     /*
271      Get a pointer to vector data.
272      - For default PETSc vectors, VecGetArray() returns a pointer to
273      the data array.  Otherwise, the routine is implementation dependent.
274      - You MUST call VecRestoreArray() when you no longer need access to
275      the array.
276      */
277     CHKERRQ(DMDAVecGetArray(da,localX,&x));
278 
279     /*
280      Get local grid boundaries (for 2-dimensional DMDA):
281      xs, ys   - starting grid indices (no ghost points)
282      xm, ym   - widths of local grid (no ghost points)
283 
284      */
285     CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
286 
287     for (j=ys; j<ys+ym; j++) {
288       for (i=xs; i<xs+xm; i++) {
289         if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
290           /* boundary conditions are all zero Dirichlet */
291           x[j][i] = 0.0;
292         } else {
293           u   = x[j][i];
294           uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
295           uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
296           F   = uxx + uyy;
297           J   = 2.0*(hydhx + hxdhy);
298           u   = u - F/J;
299 
300           x[j][i] = u;
301         }
302       }
303     }
304 
305     /*
306      Restore vector
307      */
308     CHKERRQ(DMDAVecRestoreArray(da,localX,&x));
309     CHKERRQ(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
310     CHKERRQ(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
311   }
312   CHKERRQ(DMRestoreLocalVector(da,&localX));
313   PetscFunctionReturn(0);
314 }
315 
316 /*TEST
317 
318    test:
319       args: -snes_monitor_short -snes_type nrichardson
320       requires: !single
321 
322    test:
323       suffix: 2
324       args: -snes_monitor_short -ksp_monitor_short -ksp_type richardson -pc_type none -ksp_richardson_self_scale
325       requires: !single
326 
327    test:
328       suffix: 3
329       args: -snes_monitor_short -snes_type ngmres
330 
331    test:
332       suffix: 4
333       args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none
334 
335    test:
336       suffix: 5
337       args: -snes_monitor_short -snes_type ncg
338 
339    test:
340       suffix: 6
341       args: -snes_monitor_short -ksp_type cg -ksp_monitor_short -pc_type none
342 
343    test:
344       suffix: 7
345       args: -da_refine 2 -snes_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor_short -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor_short
346       requires: !single
347 
348    test:
349       suffix: 8
350       args: -da_refine 2 -snes_monitor_short -snes_type fas -fas_levels_snes_monitor_short -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_type fas -snes_rtol 1.e-5
351 
352    test:
353       suffix: 9
354       args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc
355 
356    test:
357       suffix: 10
358       args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc -snes_trdc_use_cauchy false
359 
360 TEST*/
361