xref: /petsc/src/snes/tutorials/ex5.c (revision f6b722a57d07b8c3a218465fb7fc5d7800a7778a)
/* Help text printed by -help; summarizes the problem and its key command-line options. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /*T
11    Concepts: SNES^parallel Bratu example
12    Concepts: DMDA^using distributed arrays;
13    Concepts: IS coloring types;
14    Processors: n
15 T*/
16 
17 
18 
19 /* ------------------------------------------------------------------------
20 
21     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
22     the partial differential equation
23 
24             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
25 
26     with boundary conditions
27 
28              u = 0  for  x = 0, x = 1, y = 0, y = 1.
29 
30     A finite difference approximation with the usual 5-point stencil
31     is used to discretize the boundary value problem to obtain a nonlinear
32     system of equations.
33 
34 
35       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
36       as SNESSetDM() is provided. Example usage
37 
38       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
39              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
40 
41       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
42          multigrid levels, it will be determined automatically based on the number of refinements done)
43 
44       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
45              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
46 
47   ------------------------------------------------------------------------- */
48 
49 /*
50    Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
51    Include "petscsnes.h" so that we can use SNES solvers.  Note that these
52    headers automatically pull in the lower-level PETSc headers they depend on.
52 */
53 #include <petscdm.h>
54 #include <petscdmda.h>
55 #include <petscsnes.h>
56 #include <petscmatlab.h>
57 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
58 
59 /*
60    User-defined application context - contains data needed by the
61    application-provided call-back routines, FormJacobianLocal() and
62    FormFunctionLocal().
63 */
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* test problem parameter: the Bratu nonlinearity lambda (-par) */
  PetscInt  m,n;            /* MMS3 parameters: set to 2^m_par, 2^n_par in main(); read only by MMSSolution3/MMSForcing3 */
  /* Callback evaluating the exact (or boundary) solution at one coordinate; used for boundary residuals and error checks. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Callback evaluating the manufactured forcing term at one coordinate; may be NULL (no forcing). */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
71 
72 /*
73    User-defined routines
74 */
75 extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
76 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
77 extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
78 extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
79 extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
80 extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
81 extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
82 extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
83 extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
84 extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
85 extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
86 extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
87 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
88 extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
89 extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
90 extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
91 
92 int main(int argc,char **argv)
93 {
94   SNES           snes;                         /* nonlinear solver */
95   Vec            x;                            /* solution vector */
96   AppCtx         user;                         /* user-defined work context */
97   PetscInt       its;                          /* iterations for convergence */
98   PetscErrorCode ierr;
99   PetscReal      bratu_lambda_max = 6.81;
100   PetscReal      bratu_lambda_min = 0.;
101   PetscInt       MMS              = 0;
102   PetscBool      flg              = PETSC_FALSE;
103   DM             da;
104   Vec            r               = NULL;
105   KSP            ksp;
106   PetscInt       lits,slits;
107 
108   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
109      Initialize program
110      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
111 
112   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
113 
114   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115      Initialize problem parameters
116   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117   user.param = 6.0;
118   ierr       = PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);CHKERRQ(ierr);
119   if (user.param > bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,1,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
120   ierr       = PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL);CHKERRQ(ierr);
121   if (MMS == 3) {
122     PetscInt mPar = 2, nPar = 1;
123     ierr = PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL);CHKERRQ(ierr);
124     ierr = PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL);CHKERRQ(ierr);
125     user.m = PetscPowInt(2,mPar);
126     user.n = PetscPowInt(2,nPar);
127   }
128 
129   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
130      Create nonlinear solver context
131      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
132   ierr = SNESCreate(PETSC_COMM_WORLD,&snes);CHKERRQ(ierr);
133   ierr = SNESSetCountersReset(snes,PETSC_FALSE);CHKERRQ(ierr);
134   ierr = SNESSetNGS(snes, NonlinearGS, NULL);CHKERRQ(ierr);
135 
136   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137      Create distributed array (DMDA) to manage parallel grid and vectors
138   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
139   ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);CHKERRQ(ierr);
140   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
141   ierr = DMSetUp(da);CHKERRQ(ierr);
142   ierr = DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);CHKERRQ(ierr);
143   ierr = DMSetApplicationContext(da,&user);CHKERRQ(ierr);
144   ierr = SNESSetDM(snes,da);CHKERRQ(ierr);
145   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146      Extract global vectors from DMDA; then duplicate for remaining
147      vectors that are the same types
148    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
149   ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
150 
151   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
152      Set local function evaluation routine
153   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
154   user.mms_solution = ZeroBCSolution;
155   switch (MMS) {
156   case 0: user.mms_solution = NULL; user.mms_forcing = NULL;CHKERRQ(ierr);
157   case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
158   case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
159   case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
160   case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
161   default: SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
162   }
163   ierr = DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user);CHKERRQ(ierr);
164   ierr = PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL);CHKERRQ(ierr);
165   if (!flg) {
166     ierr = DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user);CHKERRQ(ierr);
167   }
168 
169   ierr = PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL);CHKERRQ(ierr);
170   if (flg) {
171     ierr = DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user);CHKERRQ(ierr);
172   }
173 
174   if (PetscDefined(HAVE_MATLAB_ENGINE)) {
175     PetscBool matlab_function = PETSC_FALSE;
176     ierr = PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0);CHKERRQ(ierr);
177     if (matlab_function) {
178       ierr = VecDuplicate(x,&r);CHKERRQ(ierr);
179       ierr = SNESSetFunction(snes,r,FormFunctionMatlab,&user);CHKERRQ(ierr);
180     }
181   }
182 
183   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
184      Customize nonlinear solver; set runtime options
185    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
186   ierr = SNESSetFromOptions(snes);CHKERRQ(ierr);
187 
188   ierr = FormInitialGuess(da,&user,x);CHKERRQ(ierr);
189 
190   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
191      Solve nonlinear system
192      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
193   ierr = SNESSolve(snes,NULL,x);CHKERRQ(ierr);
194   ierr = SNESGetIterationNumber(snes,&its);CHKERRQ(ierr);
195 
196   ierr = SNESGetLinearSolveIterations(snes,&slits);CHKERRQ(ierr);
197   ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr);
198   ierr = KSPGetTotalIterations(ksp,&lits);CHKERRQ(ierr);
199   if (lits != slits) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
200   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
201    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
202 
203   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
204      If using MMS, check the l_2 error
205    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
206   if (MMS) {
207     Vec       e;
208     PetscReal errorl2, errorinf;
209     PetscInt  N;
210 
211     ierr = VecDuplicate(x, &e);CHKERRQ(ierr);
212     ierr = PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view");CHKERRQ(ierr);
213     ierr = FormExactSolution(da, &user, e);CHKERRQ(ierr);
214     ierr = PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view");CHKERRQ(ierr);
215     ierr = VecAXPY(e, -1.0, x);CHKERRQ(ierr);
216     ierr = PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view");CHKERRQ(ierr);
217     ierr = VecNorm(e, NORM_2, &errorl2);CHKERRQ(ierr);
218     ierr = VecNorm(e, NORM_INFINITY, &errorinf);CHKERRQ(ierr);
219     ierr = VecGetSize(e, &N);CHKERRQ(ierr);
220     ierr = PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal(N), (double) errorinf);CHKERRQ(ierr);
221     ierr = VecDestroy(&e);CHKERRQ(ierr);
222     ierr = PetscLogEventSetDof(SNES_Solve, 0, N);CHKERRQ(ierr);
223     ierr = PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N));CHKERRQ(ierr);
224   }
225 
226   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
227      Free work space.  All PETSc objects should be destroyed when they
228      are no longer needed.
229    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
230   ierr = VecDestroy(&r);CHKERRQ(ierr);
231   ierr = VecDestroy(&x);CHKERRQ(ierr);
232   ierr = SNESDestroy(&snes);CHKERRQ(ierr);
233   ierr = DMDestroy(&da);CHKERRQ(ierr);
234   ierr = PetscFinalize();
235   return ierr;
236 }
237 /* ------------------------------------------------------------------- */
238 /*
239    FormInitialGuess - Forms initial approximation.
240 
241    Input Parameters:
242    da - The DM
243    user - user-defined application context
244 
245    Output Parameter:
246    X - vector
247  */
248 PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
249 {
250   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
251   PetscErrorCode ierr;
252   PetscReal      lambda,temp1,temp,hx,hy;
253   PetscScalar    **x;
254 
255   PetscFunctionBeginUser;
256   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
257 
258   lambda = user->param;
259   hx     = 1.0/(PetscReal)(Mx-1);
260   hy     = 1.0/(PetscReal)(My-1);
261   temp1  = lambda/(lambda + 1.0);
262 
263   /*
264      Get a pointer to vector data.
265        - For default PETSc vectors, VecGetArray() returns a pointer to
266          the data array.  Otherwise, the routine is implementation dependent.
267        - You MUST call VecRestoreArray() when you no longer need access to
268          the array.
269   */
270   ierr = DMDAVecGetArray(da,X,&x);CHKERRQ(ierr);
271 
272   /*
273      Get local grid boundaries (for 2-dimensional DMDA):
274        xs, ys   - starting grid indices (no ghost points)
275        xm, ym   - widths of local grid (no ghost points)
276 
277   */
278   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
279 
280   /*
281      Compute initial guess over the locally owned part of the grid
282   */
283   for (j=ys; j<ys+ym; j++) {
284     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
285     for (i=xs; i<xs+xm; i++) {
286       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
287         /* boundary conditions are all zero Dirichlet */
288         x[j][i] = 0.0;
289       } else {
290         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
291       }
292     }
293   }
294 
295   /*
296      Restore vector
297   */
298   ierr = DMDAVecRestoreArray(da,X,&x);CHKERRQ(ierr);
299   PetscFunctionReturn(0);
300 }
301 
302 /*
303   FormExactSolution - Forms MMS solution
304 
305   Input Parameters:
306   da - The DM
307   user - user-defined application context
308 
309   Output Parameter:
310   X - vector
311  */
312 PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
313 {
314   DM             coordDA;
315   Vec            coordinates;
316   DMDACoor2d   **coords;
317   PetscScalar  **u;
318   PetscInt       xs, ys, xm, ym, i, j;
319   PetscErrorCode ierr;
320 
321   PetscFunctionBeginUser;
322   ierr = DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);CHKERRQ(ierr);
323   ierr = DMGetCoordinateDM(da, &coordDA);CHKERRQ(ierr);
324   ierr = DMGetCoordinates(da, &coordinates);CHKERRQ(ierr);
325   ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
326   ierr = DMDAVecGetArray(da, U, &u);CHKERRQ(ierr);
327   for (j = ys; j < ys+ym; ++j) {
328     for (i = xs; i < xs+xm; ++i) {
329       user->mms_solution(user,&coords[j][i],&u[j][i]);
330     }
331   }
332   ierr = DMDAVecRestoreArray(da, U, &u);CHKERRQ(ierr);
333   ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
334   PetscFunctionReturn(0);
335 }
336 
337 PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
338 {
339   u[0] = 0.;
340   return 0;
341 }
342 
343 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
344 
345      f(x,y) = -u_xx - u_yy - lambda exp(u)
346 
347   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
348  */
349 PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
350 {
351   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
352   u[0] = x*(1 - x)*y*(1 - y);
353   PetscLogFlops(5);
354   return 0;
355 }
356 PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
357 {
358   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
359   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
360   return 0;
361 }
362 
363 PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
364 {
365   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
366   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
367   PetscLogFlops(5);
368   return 0;
369 }
370 PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
371 {
372   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
373   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
374   return 0;
375 }
376 
377 PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
378 {
379   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
380   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
381   PetscLogFlops(5);
382   return 0;
383 }
/*
  MMSForcing3 - Forcing term f = -u_xx - u_yy - lambda*exp(u) for the MMS3
  solution u = sin(m*pi*x*(1-y)) * sin(n*pi*y*(1-x)).

  The closed-form expression below comes from symbolic differentiation of the
  MMS3 solution; keep the floating-point operation order as written.
*/
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
394 
395 PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
396 {
397   const PetscReal Lx = 1.,Ly = 1.;
398   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
399   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
400   PetscLogFlops(9);
401   return 0;
402 }
/*
  MMSForcing4 - Forcing term f = -u_xx - u_yy - lambda*exp(u) for the MMS4
  solution u = (x^4 - Lx^2 x^2)(y^4 - Ly^2 y^2).

  Closed-form expression from symbolic differentiation; keep the
  floating-point operation order as written.
*/
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
412 
413 /* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates the nonlinear residual F(x) on the local
   process patch using the standard 5-point finite-difference stencil.

   Input Parameters:
   info - DMDA patch description (grid sizes, owned corner, extents)
   x    - local array view of the current iterate (with ghost points)
   user - application context; mms_solution must be non-NULL, mms_forcing may be NULL

   Output Parameter:
   f - local array view of the residual

   NOTE(review): assumes user->mms_solution is always set (main() installs
   ZeroBCSolution for the plain Bratu case) — verify if callers change.
 */
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;   /* scaling of the y-direction second difference */
  hydhx  = hy/hx;   /* scaling of the x-direction second difference */
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary residual: deviation from the prescribed (MMS or zero) boundary
           value, scaled like the interior diagonal so the Jacobian stays well-scaled */
        c.x = i*hx; c.y = j*hy;
        ierr = user->mms_solution(user,&c,&mms_solution);CHKERRQ(ierr);
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; ierr = user->mms_solution(user,&c,&uw);CHKERRQ(ierr);}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; ierr = user->mms_solution(user,&c,&ue);CHKERRQ(ierr);}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; ierr = user->mms_solution(user,&c,&un);CHKERRQ(ierr);}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; ierr = user->mms_solution(user,&c,&us);CHKERRQ(ierr);}

        /* Scaled second differences; with the hx*hy weight below this discretizes
           -Laplacian u - lambda*exp(u) - forcing */
        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        if (user->mms_forcing) {ierr = user->mms_forcing(user,&c,&mms_forcing);CHKERRQ(ierr);}
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  ierr = PetscLogFlops(11.0*info->ym*info->xm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
467 
/* FormObjectiveLocal - Evaluates the objective functional whose gradient is the
   residual, summed over the local patch and then reduced over the communicator.
   (The previous comment incorrectly described this as evaluating F(x).) */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;  /* lobj accumulates the local contribution */
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  ierr = PetscObjectGetComm((PetscObject)info->da,&comm);CHKERRQ(ierr);
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;   /* cell-area-weighted nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary contribution: quadratic penalty matching the boundary residual scaling */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Neighbors on the Dirichlet boundary are taken as zero (homogeneous BC) */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  ierr = PetscLogFlops(12.0*info->ym*info->xm);CHKERRQ(ierr);
  /* Sum the per-process contributions into the global objective value */
  ierr = MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm);CHKERRMPI(ierr);
  PetscFunctionReturn(0);
}
518 
519 /*
520    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
521 */
522 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
523 {
524   PetscErrorCode ierr;
525   PetscInt       i,j,k;
526   MatStencil     col[5],row;
527   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
528   DM             coordDA;
529   Vec            coordinates;
530   DMDACoor2d   **coords;
531 
532   PetscFunctionBeginUser;
533   lambda = user->param;
534   /* Extract coordinates */
535   ierr = DMGetCoordinateDM(info->da, &coordDA);CHKERRQ(ierr);
536   ierr = DMGetCoordinates(info->da, &coordinates);CHKERRQ(ierr);
537   ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
538   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
539   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
540   ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
541   hxdhy  = hx/hy;
542   hydhx  = hy/hx;
543   sc     = hx*hy*lambda;
544 
545 
546   /*
547      Compute entries for the locally owned part of the Jacobian.
548       - Currently, all PETSc parallel matrix formats are partitioned by
549         contiguous chunks of rows across the processors.
550       - Each processor needs to insert only elements that it owns
551         locally (but any non-local elements will be sent to the
552         appropriate processor during matrix assembly).
553       - Here, we set all entries for a particular row at once.
554       - We can set matrix entries either using either
555         MatSetValuesLocal() or MatSetValues(), as discussed above.
556   */
557   for (j=info->ys; j<info->ys+info->ym; j++) {
558     for (i=info->xs; i<info->xs+info->xm; i++) {
559       row.j = j; row.i = i;
560       /* boundary points */
561       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
562         v[0] =  2.0*(hydhx + hxdhy);
563         ierr = MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES);CHKERRQ(ierr);
564       } else {
565         k = 0;
566         /* interior grid points */
567         if (j-1 != 0) {
568           v[k]     = -hxdhy;
569           col[k].j = j - 1; col[k].i = i;
570           k++;
571         }
572         if (i-1 != 0) {
573           v[k]     = -hydhx;
574           col[k].j = j;     col[k].i = i-1;
575           k++;
576         }
577 
578         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
579 
580         if (i+1 != info->mx-1) {
581           v[k]     = -hydhx;
582           col[k].j = j;     col[k].i = i+1;
583           k++;
584         }
585         if (j+1 != info->mx-1) {
586           v[k]     = -hxdhy;
587           col[k].j = j + 1; col[k].i = i;
588           k++;
589         }
590         ierr = MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES);CHKERRQ(ierr);
591       }
592     }
593   }
594 
595   /*
596      Assemble matrix, using the 2-step process:
597        MatAssemblyBegin(), MatAssemblyEnd().
598   */
599   ierr = MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
600   ierr = MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
601   /*
602      Tell the matrix we will never add a new nonzero location to the
603      matrix. If we do, it will generate an error.
604   */
605   ierr = MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
606   PetscFunctionReturn(0);
607 }
608 
/* FormFunctionMatlab - Residual evaluation delegated to the MATLAB engine
   (script ex5m); compiled to a no-op stub when MATLAB support is absent.
   Registered via SNESSetFunction() in main() when -matlab_function is given. */
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscErrorCode ierr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  ierr = SNESGetDM(snes,&da);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  /* Names must match the variable names the ex5m MATLAB script expects */
  ierr = PetscObjectSetName((PetscObject)localX,"localX");CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)localF,"localF");CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  ierr = PetscObjectGetComm((PetscObject)snes,&comm);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  /* Ship localX to MATLAB, evaluate the residual there, and fetch localF back */
  ierr = PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);CHKERRQ(ierr);
  ierr = PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);CHKERRQ(ierr);
  ierr = PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);CHKERRQ(ierr);

  /*
     Insert values into global vector
  */
  ierr = DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
657 
658 /* ------------------------------------------------------------------- */
/*
   NonlinearGS - Applies sweeps of pointwise nonlinear Gauss-Seidel on each
   process; registered with SNESSetNGS() in main().

   Input Parameters:
   snes - the SNES context (supplies sweep count and pointwise Newton tolerances)
   X    - current iterate; updated in place
   B    - optional right-hand side vector (may be NULL)
   ctx  - unused; the application context is retrieved from the DM
 */
PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
{
  PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
  PetscErrorCode ierr;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
  PetscReal      atol,rtol,stol;
  DM             da;
  AppCtx         *user;
  Vec            localX,localB;

  PetscFunctionBeginUser;
  tot_its = 0;   /* total pointwise Newton iterations, used for flop accounting */
  ierr    = SNESNGSGetSweeps(snes,&sweeps);CHKERRQ(ierr);
  ierr    = SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its);CHKERRQ(ierr);
  ierr    = SNESGetDM(snes,&da);CHKERRQ(ierr);
  ierr    = DMGetApplicationContext(da,(void**)&user);CHKERRQ(ierr);

  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  sc     = hx*hy*lambda;  /* cell-area-weighted nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;


  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  if (B) {
    ierr = DMGetLocalVector(da,&localB);CHKERRQ(ierr);
  }
  for (l=0; l<sweeps; l++) {

    /* Refresh ghost values at the start of every sweep */
    ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
    if (B) {
      ierr = DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB);CHKERRQ(ierr);
      ierr = DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB);CHKERRQ(ierr);
    }
    /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
     the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
     the array.
     */
    ierr = DMDAVecGetArray(da,localX,&x);CHKERRQ(ierr);
    if (B) ierr = DMDAVecGetArray(da,localB,&b);CHKERRQ(ierr);
    /*
     Get local grid boundaries (for 2-dimensional DMDA):
     xs, ys   - starting grid indices (no ghost points)
     xm, ym   - widths of local grid (no ghost points)
     */
    ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          if (B) bij = b[j][i];
          else   bij = 0.;

          /* NOTE(review): ue/uw are named opposite the usual east/west convention
             (ue holds x[j][i-1]); harmless because the stencil is symmetric in them. */
          u  = x[j][i];
          un = x[j-1][i];
          us = x[j+1][i];
          ue = x[j][i-1];
          uw = x[j][i+1];

          /* Pointwise (scalar) Newton iteration on the residual at node (i,j) */
          for (k=0; k<its; k++) {
            eu  = PetscExpScalar(u);
            uxx = (2.0*u - ue - uw)*hydhx;
            uyy = (2.0*u - un - us)*hxdhy;
            F   = uxx + uyy - sc*eu - bij;
            if (k == 0) F0 = F;  /* remember the first residual for the relative test */
            J  = 2.0*(hydhx + hxdhy) - sc*eu;  /* d(F)/d(u) */
            y  = F/J;
            u -= y;
            tot_its++;

            /* Stop on absolute residual, relative residual, or small step */
            if (atol > PetscAbsReal(PetscRealPart(F)) ||
                rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
                stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
              break;
            }
          }
          x[j][i] = u;
        }
      }
    }
    /*
     Restore vector
     */
    ierr = DMDAVecRestoreArray(da,localX,&x);CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
  }
  ierr = PetscLogFlops(tot_its*(21.0));CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  /* localB is read-only, so one restore after all sweeps suffices */
  if (B) {
    ierr = DMDAVecRestoreArray(da,localB,&b);CHKERRQ(ierr);
    ierr = DMRestoreLocalVector(da,&localB);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
770 
771 /*TEST
772 
773    test:
774      suffix: asm_0
775      requires: !single
776      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
777 
778    test:
779      suffix: msm_0
780      requires: !single
781      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
782 
783    test:
784      suffix: asm_1
785      requires: !single
786      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
787 
788    test:
789      suffix: msm_1
790      requires: !single
791      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
792 
793    test:
794      suffix: asm_2
795      requires: !single
796      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
797 
798    test:
799      suffix: msm_2
800      requires: !single
801      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
802 
803    test:
804      suffix: asm_3
805      requires: !single
806      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
807 
808    test:
809      suffix: msm_3
810      requires: !single
811      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
812 
813    test:
814      suffix: asm_4
815      requires: !single
816      nsize: 2
817      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
818 
819    test:
820      suffix: msm_4
821      requires: !single
822      nsize: 2
823      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
824 
825    test:
826      suffix: asm_5
827      requires: !single
828      nsize: 2
829      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
830 
831    test:
832      suffix: msm_5
833      requires: !single
834      nsize: 2
835      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
836 
837    test:
838      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
839      requires: !single
840 
841    test:
842      suffix: 2
843      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
844 
845    test:
846      suffix: 3
847      nsize: 2
848      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
849      filter: grep -v "otal number of function evaluations"
850 
851    test:
852      suffix: 4
853      nsize: 2
854      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
855 
856    test:
857      suffix: 5_anderson
858      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
859 
860    test:
861      suffix: 5_aspin
862      nsize: 4
863      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
864 
865    test:
866      suffix: 5_broyden
867      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
868 
869    test:
870      suffix: 5_fas
871      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
872      requires: !single
873 
874    test:
875      suffix: 5_fas_additive
876      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
877 
878    test:
879      suffix: 5_fas_monitor
880      args: -da_refine 1 -snes_type fas -snes_fas_monitor
881      requires: !single
882 
883    test:
884      suffix: 5_ls
885      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
886 
887    test:
888      suffix: 5_ls_sell_sor
889      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
890      output_file: output/ex5_5_ls.out
891 
892    test:
893      suffix: 5_nasm
894      nsize: 4
895      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
896 
897    test:
898      suffix: 5_ncg
899      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
900 
901    test:
902      suffix: 5_newton_asm_dmda
903      nsize: 4
904      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
905      requires: !single
906 
907    test:
908      suffix: 5_newton_gasm_dmda
909      nsize: 4
910      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
911      requires: !single
912 
913    test:
914      suffix: 5_ngmres
915      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
916 
917    test:
918      suffix: 5_ngmres_fas
919      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
920 
921    test:
922      suffix: 5_ngmres_ngs
923      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
924 
925    test:
926      suffix: 5_ngmres_nrichardson
927      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
928      output_file: output/ex5_5_ngmres_richardson.out
929 
930    test:
931      suffix: 5_nrichardson
932      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
933 
934    test:
935      suffix: 5_qn
936      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
937 
938    test:
939      suffix: 6
940      nsize: 4
941      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
942 
943    test:
944      requires: complex !single
945      suffix: complex
946      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
947 
948 TEST*/
949