xref: /petsc/src/snes/tutorials/ex5.c (revision 6a98f8dc3f2c9149905a87dc2e9d0fedaf64e09a)
1 
2 static char help[] = "Bratu nonlinear PDE in 2d.\n\
3 We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
4 domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
5 The command line options include:\n\
6   -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
7      problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
8   -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
9       that MMS3 will be evaluated with 2^m_par, 2^n_par";
10 
11 /*T
12    Concepts: SNES^parallel Bratu example
13    Concepts: DMDA^using distributed arrays;
14    Concepts: IS coloring types;
15    Processors: n
16 T*/
17 
18 
19 
20 /* ------------------------------------------------------------------------
21 
22     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
23     the partial differential equation
24 
25             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
26 
27     with boundary conditions
28 
29              u = 0  for  x = 0, x = 1, y = 0, y = 1.
30 
31     A finite difference approximation with the usual 5-point stencil
32     is used to discretize the boundary value problem to obtain a nonlinear
33     system of equations.
34 
35 
36       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
37       as SNESSetDM() is provided. Example usage
38 
39       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
40              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
41 
42       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
43          multigrid levels, it will be determined automatically based on the number of refinements done)
44 
45       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
46              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
47 
48   ------------------------------------------------------------------------- */
49 
50 /*
51    Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
52    Include "petscsnes.h" so that we can use SNES solvers.  Note that this
   header automatically pulls in the lower-level PETSc headers it depends on.
53 */
54 #include <petscdm.h>
55 #include <petscdmda.h>
56 #include <petscsnes.h>
57 #include <petscmatlab.h>
58 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
59 
60 /*
61    User-defined application context - contains data needed by the
62    application-provided call-back routines, FormJacobianLocal() and
63    FormFunctionLocal().
64 */
typedef struct AppCtx AppCtx;
/*
   User-defined application context - carried through the DM to the local
   residual/Jacobian callbacks and the nonlinear Gauss-Seidel smoother.
*/
struct AppCtx {
  PetscReal param;          /* test problem parameter (Bratu lambda, 0 <= lambda <= 6.81) */
  PetscInt  m,n;            /* MMS3 parameters: frequencies 2^m_par, 2^n_par (only set when -mms 3) */
  /* Evaluates the manufactured (exact) solution at one coordinate; called unconditionally
     on boundary points by FormFunctionLocal(), so it must be non-NULL when solving. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Evaluates the MMS forcing term at one coordinate; may be NULL (no forcing). */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
72 
73 /*
74    User-defined routines
75 */
76 extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
77 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
78 extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
79 extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
80 extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
81 extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
82 extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
83 extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
84 extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
85 extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
86 extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
87 extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
88 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
89 extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
90 #if defined(PETSC_HAVE_MATLAB_ENGINE)
91 extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
92 #endif
93 extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
94 
95 int main(int argc,char **argv)
96 {
97   SNES           snes;                         /* nonlinear solver */
98   Vec            x;                            /* solution vector */
99   AppCtx         user;                         /* user-defined work context */
100   PetscInt       its;                          /* iterations for convergence */
101   PetscErrorCode ierr;
102   PetscReal      bratu_lambda_max = 6.81;
103   PetscReal      bratu_lambda_min = 0.;
104   PetscInt       MMS              = 0;
105   PetscBool      flg              = PETSC_FALSE;
106   DM             da;
107 #if defined(PETSC_HAVE_MATLAB_ENGINE)
108   Vec            r               = NULL;
109   PetscBool      matlab_function = PETSC_FALSE;
110 #endif
111   KSP            ksp;
112   PetscInt       lits,slits;
113 
114   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115      Initialize program
116      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117 
118   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
119 
120   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121      Initialize problem parameters
122   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123   user.param = 6.0;
124   ierr       = PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);CHKERRQ(ierr);
125   if (user.param > bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,1,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
126   ierr       = PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL);CHKERRQ(ierr);
127   if (MMS == 3) {
128     PetscInt mPar = 2, nPar = 1;
129     ierr = PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL);CHKERRQ(ierr);
130     ierr = PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL);CHKERRQ(ierr);
131     user.m = PetscPowInt(2,mPar);
132     user.n = PetscPowInt(2,nPar);
133   }
134 
135   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
136      Create nonlinear solver context
137      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
138   ierr = SNESCreate(PETSC_COMM_WORLD,&snes);CHKERRQ(ierr);
139   ierr = SNESSetCountersReset(snes,PETSC_FALSE);CHKERRQ(ierr);
140   ierr = SNESSetNGS(snes, NonlinearGS, NULL);CHKERRQ(ierr);
141 
142   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
143      Create distributed array (DMDA) to manage parallel grid and vectors
144   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
145   ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);CHKERRQ(ierr);
146   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
147   ierr = DMSetUp(da);CHKERRQ(ierr);
148   ierr = DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);CHKERRQ(ierr);
149   ierr = DMSetApplicationContext(da,&user);CHKERRQ(ierr);
150   ierr = SNESSetDM(snes,da);CHKERRQ(ierr);
151   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
152      Extract global vectors from DMDA; then duplicate for remaining
153      vectors that are the same types
154    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
155   ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
156 
157   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
158      Set local function evaluation routine
159   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
160   user.mms_solution = ZeroBCSolution;
161   switch (MMS) {
162   case 0: user.mms_solution = NULL; user.mms_forcing = NULL; CHKERRQ(ierr);
163   case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
164   case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
165   case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
166   case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
167   default: SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
168   }
169   ierr = DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user);CHKERRQ(ierr);
170   ierr = PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL);CHKERRQ(ierr);
171   if (!flg) {
172     ierr = DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user);CHKERRQ(ierr);
173   }
174 
175   ierr = PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL);CHKERRQ(ierr);
176   if (flg) {
177     ierr = DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user);CHKERRQ(ierr);
178   }
179 
180 #if defined(PETSC_HAVE_MATLAB_ENGINE)
181   ierr = PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0);CHKERRQ(ierr);
182   if (matlab_function) {
183     ierr = VecDuplicate(x,&r);CHKERRQ(ierr);
184     ierr = SNESSetFunction(snes,r,FormFunctionMatlab,&user);CHKERRQ(ierr);
185   }
186 #endif
187 
188   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
189      Customize nonlinear solver; set runtime options
190    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
191   ierr = SNESSetFromOptions(snes);CHKERRQ(ierr);
192 
193   ierr = FormInitialGuess(da,&user,x);CHKERRQ(ierr);
194 
195   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
196      Solve nonlinear system
197      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
198   ierr = SNESSolve(snes,NULL,x);CHKERRQ(ierr);
199   ierr = SNESGetIterationNumber(snes,&its);CHKERRQ(ierr);
200 
201   ierr = SNESGetLinearSolveIterations(snes,&slits);CHKERRQ(ierr);
202   ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr);
203   ierr = KSPGetTotalIterations(ksp,&lits);CHKERRQ(ierr);
204   if (lits != slits) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
205   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
206    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
207 
208   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
209      If using MMS, check the l_2 error
210    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
211   if (MMS) {
212     Vec       e;
213     PetscReal errorl2, errorinf;
214     PetscInt  N;
215 
216     ierr = VecDuplicate(x, &e);CHKERRQ(ierr);
217     ierr = PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view");CHKERRQ(ierr);
218     ierr = FormExactSolution(da, &user, e);CHKERRQ(ierr);
219     ierr = PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view");CHKERRQ(ierr);
220     ierr = VecAXPY(e, -1.0, x);CHKERRQ(ierr);
221     ierr = PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view");CHKERRQ(ierr);
222     ierr = VecNorm(e, NORM_2, &errorl2);CHKERRQ(ierr);
223     ierr = VecNorm(e, NORM_INFINITY, &errorinf);CHKERRQ(ierr);
224     ierr = VecGetSize(e, &N);CHKERRQ(ierr);
225     ierr = PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal(N), (double) errorinf);CHKERRQ(ierr);
226     ierr = VecDestroy(&e);CHKERRQ(ierr);
227     ierr = PetscLogEventSetDof(SNES_Solve, 0, N);CHKERRQ(ierr);
228     ierr = PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N));CHKERRQ(ierr);
229   }
230 
231   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
232      Free work space.  All PETSc objects should be destroyed when they
233      are no longer needed.
234    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
235 #if defined(PETSC_HAVE_MATLAB_ENGINE)
236   ierr = VecDestroy(&r);CHKERRQ(ierr);
237 #endif
238   ierr = VecDestroy(&x);CHKERRQ(ierr);
239   ierr = SNESDestroy(&snes);CHKERRQ(ierr);
240   ierr = DMDestroy(&da);CHKERRQ(ierr);
241   ierr = PetscFinalize();
242   return ierr;
243 }
244 /* ------------------------------------------------------------------- */
245 /*
246    FormInitialGuess - Forms initial approximation.
247 
248    Input Parameters:
249    da - The DM
250    user - user-defined application context
251 
252    Output Parameter:
253    X - vector
254  */
255 PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
256 {
257   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
258   PetscErrorCode ierr;
259   PetscReal      lambda,temp1,temp,hx,hy;
260   PetscScalar    **x;
261 
262   PetscFunctionBeginUser;
263   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
264 
265   lambda = user->param;
266   hx     = 1.0/(PetscReal)(Mx-1);
267   hy     = 1.0/(PetscReal)(My-1);
268   temp1  = lambda/(lambda + 1.0);
269 
270   /*
271      Get a pointer to vector data.
272        - For default PETSc vectors, VecGetArray() returns a pointer to
273          the data array.  Otherwise, the routine is implementation dependent.
274        - You MUST call VecRestoreArray() when you no longer need access to
275          the array.
276   */
277   ierr = DMDAVecGetArray(da,X,&x);CHKERRQ(ierr);
278 
279   /*
280      Get local grid boundaries (for 2-dimensional DMDA):
281        xs, ys   - starting grid indices (no ghost points)
282        xm, ym   - widths of local grid (no ghost points)
283 
284   */
285   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
286 
287   /*
288      Compute initial guess over the locally owned part of the grid
289   */
290   for (j=ys; j<ys+ym; j++) {
291     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
292     for (i=xs; i<xs+xm; i++) {
293       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
294         /* boundary conditions are all zero Dirichlet */
295         x[j][i] = 0.0;
296       } else {
297         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
298       }
299     }
300   }
301 
302   /*
303      Restore vector
304   */
305   ierr = DMDAVecRestoreArray(da,X,&x);CHKERRQ(ierr);
306   PetscFunctionReturn(0);
307 }
308 
309 /*
310   FormExactSolution - Forms MMS solution
311 
312   Input Parameters:
313   da - The DM
314   user - user-defined application context
315 
316   Output Parameter:
317   X - vector
318  */
319 PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
320 {
321   DM             coordDA;
322   Vec            coordinates;
323   DMDACoor2d   **coords;
324   PetscScalar  **u;
325   PetscInt       xs, ys, xm, ym, i, j;
326   PetscErrorCode ierr;
327 
328   PetscFunctionBeginUser;
329   ierr = DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);CHKERRQ(ierr);
330   ierr = DMGetCoordinateDM(da, &coordDA);CHKERRQ(ierr);
331   ierr = DMGetCoordinates(da, &coordinates);CHKERRQ(ierr);
332   ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
333   ierr = DMDAVecGetArray(da, U, &u);CHKERRQ(ierr);
334   for (j = ys; j < ys+ym; ++j) {
335     for (i = xs; i < xs+xm; ++i) {
336       user->mms_solution(user,&coords[j][i],&u[j][i]);
337     }
338   }
339   ierr = DMDAVecRestoreArray(da, U, &u);CHKERRQ(ierr);
340   ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
341   PetscFunctionReturn(0);
342 }
343 
344 PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
345 {
346   u[0] = 0.;
347   return 0;
348 }
349 
350 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
351 
352      f(x,y) = -u_xx - u_yy - lambda exp(u)
353 
354   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
355  */
356 PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
357 {
358   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
359   u[0] = x*(1 - x)*y*(1 - y);
360   PetscLogFlops(5);
361   return 0;
362 }
363 PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
364 {
365   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
366   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
367   return 0;
368 }
369 
370 PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
371 {
372   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
373   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
374   PetscLogFlops(5);
375   return 0;
376 }
377 PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
378 {
379   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
380   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
381   return 0;
382 }
383 
384 PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
385 {
386   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
387   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
388   PetscLogFlops(5);
389   return 0;
390 }
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  /* Closed-form forcing f = -u_xx - u_yy - lambda*exp(u) for the MMS3 solution
     u = sin(m*pi*x(1-y)) sin(n*pi*y(1-x)).  The expression looks symbolically
     generated; NOTE(review): re-derive with a CAS before editing any term. */
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
401 
402 PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
403 {
404   const PetscReal Lx = 1.,Ly = 1.;
405   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
406   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
407   PetscLogFlops(9);
408   return 0;
409 }
410 PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
411 {
412   const PetscReal Lx = 1.,Ly = 1.;
413   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
414   f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
415           + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
416           - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
417   return 0;
418 }
419 
420 /* ------------------------------------------------------------------- */
421 /*
422    FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch
423 
424 
425  */
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;   /* weight of the y-direction second difference */
  hydhx  = hy/hx;   /* weight of the x-direction second difference */
  /*
     Compute function over the locally owned part of the grid.
     The residual is the 5-point finite-difference Laplacian scaled by hx*hy,
     minus the scaled nonlinearity lambda*exp(u) and any MMS forcing.
     NOTE: user->mms_solution is called unconditionally below, so it must be
     non-NULL (ZeroBCSolution for the plain Bratu problem).
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary row: penalize deviation from the (MMS or zero) boundary value,
           scaled by 2*(hydhx+hxdhy) to match the interior diagonal. */
        c.x = i*hx; c.y = j*hy;
        ierr = user->mms_solution(user,&c,&mms_solution);CHKERRQ(ierr);
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; ierr = user->mms_solution(user,&c,&uw);CHKERRQ(ierr);}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; ierr = user->mms_solution(user,&c,&ue);CHKERRQ(ierr);}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; ierr = user->mms_solution(user,&c,&un);CHKERRQ(ierr);}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; ierr = user->mms_solution(user,&c,&us);CHKERRQ(ierr);}

        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        /* mms_forcing may legitimately be NULL (no manufactured forcing term). */
        if (user->mms_forcing) {ierr = user->mms_forcing(user,&c,&mms_forcing);CHKERRQ(ierr);}
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  ierr = PetscLogFlops(11.0*info->ym*info->xm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
474 
475 /* FormObjectiveLocal - Evaluates nonlinear function, F(x) on local process patch */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;   /* lobj accumulates the local contribution */
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  ierr = PetscObjectGetComm((PetscObject)info->da,&comm);CHKERRQ(ierr);
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;   /* scaled nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute the objective over the locally owned part of the grid;
     the global value is obtained by the MPI_Allreduce at the end.
     NOTE(review): unlike FormFunctionLocal, this routine uses plain zero
     Dirichlet data (no MMS boundary values) -- confirm it is only meant for
     the MMS-free problem when comparing with the residual.
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary term matching the penalized boundary residual. */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Neighbors on the boundary contribute their (zero) Dirichlet value. */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  ierr = PetscLogFlops(12.0*info->ym*info->xm);CHKERRQ(ierr);
  /* Sum the per-process contributions into the global objective value. */
  ierr = MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
525 
526 /*
527    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
528 */
529 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
530 {
531   PetscErrorCode ierr;
532   PetscInt       i,j,k;
533   MatStencil     col[5],row;
534   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
535   DM             coordDA;
536   Vec            coordinates;
537   DMDACoor2d   **coords;
538 
539   PetscFunctionBeginUser;
540   lambda = user->param;
541   /* Extract coordinates */
542   ierr = DMGetCoordinateDM(info->da, &coordDA);CHKERRQ(ierr);
543   ierr = DMGetCoordinates(info->da, &coordinates);CHKERRQ(ierr);
544   ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
545   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
546   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
547   ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
548   hxdhy  = hx/hy;
549   hydhx  = hy/hx;
550   sc     = hx*hy*lambda;
551 
552 
553   /*
554      Compute entries for the locally owned part of the Jacobian.
555       - Currently, all PETSc parallel matrix formats are partitioned by
556         contiguous chunks of rows across the processors.
557       - Each processor needs to insert only elements that it owns
558         locally (but any non-local elements will be sent to the
559         appropriate processor during matrix assembly).
560       - Here, we set all entries for a particular row at once.
561       - We can set matrix entries either using either
562         MatSetValuesLocal() or MatSetValues(), as discussed above.
563   */
564   for (j=info->ys; j<info->ys+info->ym; j++) {
565     for (i=info->xs; i<info->xs+info->xm; i++) {
566       row.j = j; row.i = i;
567       /* boundary points */
568       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
569         v[0] =  2.0*(hydhx + hxdhy);
570         ierr = MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES);CHKERRQ(ierr);
571       } else {
572         k = 0;
573         /* interior grid points */
574         if (j-1 != 0) {
575           v[k]     = -hxdhy;
576           col[k].j = j - 1; col[k].i = i;
577           k++;
578         }
579         if (i-1 != 0) {
580           v[k]     = -hydhx;
581           col[k].j = j;     col[k].i = i-1;
582           k++;
583         }
584 
585         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
586 
587         if (i+1 != info->mx-1) {
588           v[k]     = -hydhx;
589           col[k].j = j;     col[k].i = i+1;
590           k++;
591         }
592         if (j+1 != info->mx-1) {
593           v[k]     = -hxdhy;
594           col[k].j = j + 1; col[k].i = i;
595           k++;
596         }
597         ierr = MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES);CHKERRQ(ierr);
598       }
599     }
600   }
601 
602   /*
603      Assemble matrix, using the 2-step process:
604        MatAssemblyBegin(), MatAssemblyEnd().
605   */
606   ierr = MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
607   ierr = MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
608   /*
609      Tell the matrix we will never add a new nonzero location to the
610      matrix. If we do, it will generate an error.
611   */
612   ierr = MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
613   PetscFunctionReturn(0);
614 }
615 
#if defined(PETSC_HAVE_MATLAB_ENGINE)
/*
   FormFunctionMatlab - Evaluates the nonlinear residual by delegating the
   local computation to the MATLAB function ex5m (enabled with -matlab_function).
   NOTE(review): assumes ex5m.m is available on the MATLAB path -- confirm.

   Input Parameters:
   snes - the SNES context
   X    - current iterate
   ptr  - AppCtx carrying the Bratu parameter lambda

   Output Parameter:
   F    - residual vector
*/
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx         *user = (AppCtx*)ptr;
  PetscErrorCode ierr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  ierr = SNESGetDM(snes,&da);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  /* Names are how the MATLAB workspace identifies the transferred vectors. */
  ierr = PetscObjectSetName((PetscObject)localX,"localX");CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)localF,"localF");CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  ierr = PetscObjectGetComm((PetscObject)snes,&comm);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  /* Ship localX to MATLAB, evaluate the residual there, and fetch localF back. */
  ierr = PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);CHKERRQ(ierr);
  ierr = PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);CHKERRQ(ierr);
  ierr = PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);CHKERRQ(ierr);

  /*
     Insert values into global vector
  */
  ierr = DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
662 
663 /* ------------------------------------------------------------------- */
664 /*
665       Applies some sweeps on nonlinear Gauss-Seidel on each process
666 
667  */
668 PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
669 {
670   PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
671   PetscErrorCode ierr;
672   PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
673   PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
674   PetscReal      atol,rtol,stol;
675   DM             da;
676   AppCtx         *user;
677   Vec            localX,localB;
678 
679   PetscFunctionBeginUser;
680   tot_its = 0;
681   ierr    = SNESNGSGetSweeps(snes,&sweeps);CHKERRQ(ierr);
682   ierr    = SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its);CHKERRQ(ierr);
683   ierr    = SNESGetDM(snes,&da);CHKERRQ(ierr);
684   ierr    = DMGetApplicationContext(da,(void**)&user);CHKERRQ(ierr);
685 
686   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
687 
688   lambda = user->param;
689   hx     = 1.0/(PetscReal)(Mx-1);
690   hy     = 1.0/(PetscReal)(My-1);
691   sc     = hx*hy*lambda;
692   hxdhy  = hx/hy;
693   hydhx  = hy/hx;
694 
695 
696   ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
697   if (B) {
698     ierr = DMGetLocalVector(da,&localB);CHKERRQ(ierr);
699   }
700   for (l=0; l<sweeps; l++) {
701 
702     ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
703     ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
704     if (B) {
705       ierr = DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB);CHKERRQ(ierr);
706       ierr = DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB);CHKERRQ(ierr);
707     }
708     /*
709      Get a pointer to vector data.
710      - For default PETSc vectors, VecGetArray() returns a pointer to
711      the data array.  Otherwise, the routine is implementation dependent.
712      - You MUST call VecRestoreArray() when you no longer need access to
713      the array.
714      */
715     ierr = DMDAVecGetArray(da,localX,&x);CHKERRQ(ierr);
716     if (B) ierr = DMDAVecGetArray(da,localB,&b);CHKERRQ(ierr);
717     /*
718      Get local grid boundaries (for 2-dimensional DMDA):
719      xs, ys   - starting grid indices (no ghost points)
720      xm, ym   - widths of local grid (no ghost points)
721      */
722     ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
723 
724     for (j=ys; j<ys+ym; j++) {
725       for (i=xs; i<xs+xm; i++) {
726         if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
727           /* boundary conditions are all zero Dirichlet */
728           x[j][i] = 0.0;
729         } else {
730           if (B) bij = b[j][i];
731           else   bij = 0.;
732 
733           u  = x[j][i];
734           un = x[j-1][i];
735           us = x[j+1][i];
736           ue = x[j][i-1];
737           uw = x[j][i+1];
738 
739           for (k=0; k<its; k++) {
740             eu  = PetscExpScalar(u);
741             uxx = (2.0*u - ue - uw)*hydhx;
742             uyy = (2.0*u - un - us)*hxdhy;
743             F   = uxx + uyy - sc*eu - bij;
744             if (k == 0) F0 = F;
745             J  = 2.0*(hydhx + hxdhy) - sc*eu;
746             y  = F/J;
747             u -= y;
748             tot_its++;
749 
750             if (atol > PetscAbsReal(PetscRealPart(F)) ||
751                 rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
752                 stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
753               break;
754             }
755           }
756           x[j][i] = u;
757         }
758       }
759     }
760     /*
761      Restore vector
762      */
763     ierr = DMDAVecRestoreArray(da,localX,&x);CHKERRQ(ierr);
764     ierr = DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
765     ierr = DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
766   }
767   ierr = PetscLogFlops(tot_its*(21.0));CHKERRQ(ierr);
768   ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
769   if (B) {
770     ierr = DMDAVecRestoreArray(da,localB,&b);CHKERRQ(ierr);
771     ierr = DMRestoreLocalVector(da,&localB);CHKERRQ(ierr);
772   }
773   PetscFunctionReturn(0);
774 }
775 
776 /*TEST
777 
778    test:
779      suffix: asm_0
780      requires: !single
781      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
782 
783    test:
784      suffix: msm_0
785      requires: !single
786      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
787 
788    test:
789      suffix: asm_1
790      requires: !single
791      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
792 
793    test:
794      suffix: msm_1
795      requires: !single
796      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
797 
798    test:
799      suffix: asm_2
800      requires: !single
801      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
802 
803    test:
804      suffix: msm_2
805      requires: !single
806      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
807 
808    test:
809      suffix: asm_3
810      requires: !single
811      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
812 
813    test:
814      suffix: msm_3
815      requires: !single
816      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
817 
818    test:
819      suffix: asm_4
820      requires: !single
821      nsize: 2
822      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
823 
824    test:
825      suffix: msm_4
826      requires: !single
827      nsize: 2
828      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
829 
830    test:
831      suffix: asm_5
832      requires: !single
833      nsize: 2
834      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
835 
836    test:
837      suffix: msm_5
838      requires: !single
839      nsize: 2
840      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
841 
842    test:
843      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
844      requires: !single
845 
846    test:
847      suffix: 2
848      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
849 
850    test:
851      suffix: 3
852      nsize: 2
853      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
854      filter: grep -v "otal number of function evaluations"
855 
856    test:
857      suffix: 4
858      nsize: 2
859      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
860 
861    test:
862      suffix: 5_anderson
863      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
864 
865    test:
866      suffix: 5_aspin
867      nsize: 4
868      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
869 
870    test:
871      suffix: 5_broyden
872      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
873 
874    test:
875      suffix: 5_fas
876      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
877      requires: !single
878 
879    test:
880      suffix: 5_fas_additive
881      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
882 
883    test:
884      suffix: 5_fas_monitor
885      args: -da_refine 1 -snes_type fas -snes_fas_monitor
886      requires: !single
887 
888    test:
889      suffix: 5_ls
890      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
891 
892    test:
893      suffix: 5_ls_sell_sor
894      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
895      output_file: output/ex5_5_ls.out
896 
897    test:
898      suffix: 5_nasm
899      nsize: 4
900      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
901 
902    test:
903      suffix: 5_ncg
904      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
905 
906    test:
907      suffix: 5_newton_asm_dmda
908      nsize: 4
909      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
910      requires: !single
911 
912    test:
913      suffix: 5_newton_gasm_dmda
914      nsize: 4
915      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
916      requires: !single
917 
918    test:
919      suffix: 5_ngmres
920      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
921 
922    test:
923      suffix: 5_ngmres_fas
924      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
925 
926    test:
927      suffix: 5_ngmres_ngs
928      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
929 
930    test:
931      suffix: 5_ngmres_nrichardson
932      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
933      output_file: output/ex5_5_ngmres_richardson.out
934 
935    test:
936      suffix: 5_nrichardson
937      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
938 
939    test:
940      suffix: 5_qn
941      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
942 
943    test:
944      suffix: 6
945      nsize: 4
946      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
947 
948    test:
949      requires: complex !single
950      suffix: complex
951      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
952 
953 TEST*/
954