xref: /petsc/src/snes/tutorials/ex5.c (revision 5f80ce2ab25dff0f4601e710601cbbcecf323266)
/* Usage text shown by -help and registered with PetscInitialize() below. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /*T
11    Concepts: SNES^parallel Bratu example
12    Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
14    Processors: n
15 T*/
16 
17 /* ------------------------------------------------------------------------
18 
19     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
20     the partial differential equation
21 
22             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
23 
24     with boundary conditions
25 
26              u = 0  for  x = 0, x = 1, y = 0, y = 1.
27 
28     A finite difference approximation with the usual 5-point stencil
29     is used to discretize the boundary value problem to obtain a nonlinear
30     system of equations.
31 
32       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
33       as SNESSetDM() is provided. Example usage
34 
35       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
36              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
37 
38       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
39          multigrid levels, it will be determined automatically based on the number of refinements done)
40 
41       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
42              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
43 
44   ------------------------------------------------------------------------- */
45 
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that these
   headers automatically pull in the lower-level PETSc headers they require.
*/
50 #include <petscdm.h>
51 #include <petscdmda.h>
52 #include <petscsnes.h>
53 #include <petscmatlab.h>
54 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
55 
56 /*
57    User-defined application context - contains data needed by the
58    application-provided call-back routines, FormJacobianLocal() and
59    FormFunctionLocal().
60 */
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* test problem parameter (Bratu lambda) */
  PetscInt  m,n;            /* MMS3 parameters (frequencies 2^m_par, 2^n_par) */
  /* Manufactured solution u(x,y) evaluated at one coordinate; must never be NULL
     (FormFunctionLocal calls it unconditionally on boundary points). */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Forcing term f(x,y) for the manufactured solution; may be NULL (no forcing). */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
68 
69 /*
70    User-defined routines
71 */
72 extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
73 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
74 extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
75 extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
76 extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
77 extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
78 extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
79 extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
80 extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
81 extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
82 extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
83 extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
84 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
85 extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
86 extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
87 extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
88 
89 int main(int argc,char **argv)
90 {
91   SNES           snes;                         /* nonlinear solver */
92   Vec            x;                            /* solution vector */
93   AppCtx         user;                         /* user-defined work context */
94   PetscInt       its;                          /* iterations for convergence */
95   PetscErrorCode ierr;
96   PetscReal      bratu_lambda_max = 6.81;
97   PetscReal      bratu_lambda_min = 0.;
98   PetscInt       MMS              = 0;
99   PetscBool      flg              = PETSC_FALSE;
100   DM             da;
101   Vec            r               = NULL;
102   KSP            ksp;
103   PetscInt       lits,slits;
104 
105   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
106      Initialize program
107      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
108 
109   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
110 
111   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
112      Initialize problem parameters
113   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
114   user.param = 6.0;
115   CHKERRQ(PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL));
116   PetscCheckFalse(user.param > bratu_lambda_max || user.param < bratu_lambda_min,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
117   CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL));
118   if (MMS == 3) {
119     PetscInt mPar = 2, nPar = 1;
120     CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL));
121     CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL));
122     user.m = PetscPowInt(2,mPar);
123     user.n = PetscPowInt(2,nPar);
124   }
125 
126   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
127      Create nonlinear solver context
128      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
129   CHKERRQ(SNESCreate(PETSC_COMM_WORLD,&snes));
130   CHKERRQ(SNESSetCountersReset(snes,PETSC_FALSE));
131   CHKERRQ(SNESSetNGS(snes, NonlinearGS, NULL));
132 
133   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
134      Create distributed array (DMDA) to manage parallel grid and vectors
135   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
136   CHKERRQ(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
137   CHKERRQ(DMSetFromOptions(da));
138   CHKERRQ(DMSetUp(da));
139   CHKERRQ(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
140   CHKERRQ(DMSetApplicationContext(da,&user));
141   CHKERRQ(SNESSetDM(snes,da));
142   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
143      Extract global vectors from DMDA; then duplicate for remaining
144      vectors that are the same types
145    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
146   CHKERRQ(DMCreateGlobalVector(da,&x));
147 
148   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149      Set local function evaluation routine
150   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
151   user.mms_solution = ZeroBCSolution;
152   switch (MMS) {
153   case 0: user.mms_solution = NULL; user.mms_forcing = NULL;
154   case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
155   case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
156   case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
157   case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
158   default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
159   }
160   CHKERRQ(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user));
161   CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL));
162   if (!flg) {
163     CHKERRQ(DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user));
164   }
165 
166   CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL));
167   if (flg) {
168     CHKERRQ(DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user));
169   }
170 
171   if (PetscDefined(HAVE_MATLAB_ENGINE)) {
172     PetscBool matlab_function = PETSC_FALSE;
173     CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0));
174     if (matlab_function) {
175       CHKERRQ(VecDuplicate(x,&r));
176       CHKERRQ(SNESSetFunction(snes,r,FormFunctionMatlab,&user));
177     }
178   }
179 
180   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
181      Customize nonlinear solver; set runtime options
182    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
183   CHKERRQ(SNESSetFromOptions(snes));
184 
185   CHKERRQ(FormInitialGuess(da,&user,x));
186 
187   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
188      Solve nonlinear system
189      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
190   CHKERRQ(SNESSolve(snes,NULL,x));
191   CHKERRQ(SNESGetIterationNumber(snes,&its));
192 
193   CHKERRQ(SNESGetLinearSolveIterations(snes,&slits));
194   CHKERRQ(SNESGetKSP(snes,&ksp));
195   CHKERRQ(KSPGetTotalIterations(ksp,&lits));
196   PetscCheckFalse(lits != slits,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
197   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
199 
200   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
201      If using MMS, check the l_2 error
202    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
203   if (MMS) {
204     Vec       e;
205     PetscReal errorl2, errorinf;
206     PetscInt  N;
207 
208     CHKERRQ(VecDuplicate(x, &e));
209     CHKERRQ(PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view"));
210     CHKERRQ(FormExactSolution(da, &user, e));
211     CHKERRQ(PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view"));
212     CHKERRQ(VecAXPY(e, -1.0, x));
213     CHKERRQ(PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view"));
214     CHKERRQ(VecNorm(e, NORM_2, &errorl2));
215     CHKERRQ(VecNorm(e, NORM_INFINITY, &errorinf));
216     CHKERRQ(VecGetSize(e, &N));
217     CHKERRQ(PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal((PetscReal)N), (double) errorinf));
218     CHKERRQ(VecDestroy(&e));
219     CHKERRQ(PetscLogEventSetDof(SNES_Solve, 0, N));
220     CHKERRQ(PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N)));
221   }
222 
223   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
224      Free work space.  All PETSc objects should be destroyed when they
225      are no longer needed.
226    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
227   CHKERRQ(VecDestroy(&r));
228   CHKERRQ(VecDestroy(&x));
229   CHKERRQ(SNESDestroy(&snes));
230   CHKERRQ(DMDestroy(&da));
231   ierr = PetscFinalize();
232   return ierr;
233 }
234 /* ------------------------------------------------------------------- */
235 /*
236    FormInitialGuess - Forms initial approximation.
237 
238    Input Parameters:
239    da - The DM
240    user - user-defined application context
241 
242    Output Parameter:
243    X - vector
244  */
245 PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
246 {
247   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
248   PetscReal      lambda,temp1,temp,hx,hy;
249   PetscScalar    **x;
250 
251   PetscFunctionBeginUser;
252   CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
253 
254   lambda = user->param;
255   hx     = 1.0/(PetscReal)(Mx-1);
256   hy     = 1.0/(PetscReal)(My-1);
257   temp1  = lambda/(lambda + 1.0);
258 
259   /*
260      Get a pointer to vector data.
261        - For default PETSc vectors, VecGetArray() returns a pointer to
262          the data array.  Otherwise, the routine is implementation dependent.
263        - You MUST call VecRestoreArray() when you no longer need access to
264          the array.
265   */
266   CHKERRQ(DMDAVecGetArray(da,X,&x));
267 
268   /*
269      Get local grid boundaries (for 2-dimensional DMDA):
270        xs, ys   - starting grid indices (no ghost points)
271        xm, ym   - widths of local grid (no ghost points)
272 
273   */
274   CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
275 
276   /*
277      Compute initial guess over the locally owned part of the grid
278   */
279   for (j=ys; j<ys+ym; j++) {
280     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
281     for (i=xs; i<xs+xm; i++) {
282       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
283         /* boundary conditions are all zero Dirichlet */
284         x[j][i] = 0.0;
285       } else {
286         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
287       }
288     }
289   }
290 
291   /*
292      Restore vector
293   */
294   CHKERRQ(DMDAVecRestoreArray(da,X,&x));
295   PetscFunctionReturn(0);
296 }
297 
298 /*
299   FormExactSolution - Forms MMS solution
300 
301   Input Parameters:
302   da - The DM
303   user - user-defined application context
304 
305   Output Parameter:
306   X - vector
307  */
308 PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
309 {
310   DM             coordDA;
311   Vec            coordinates;
312   DMDACoor2d   **coords;
313   PetscScalar  **u;
314   PetscInt       xs, ys, xm, ym, i, j;
315 
316   PetscFunctionBeginUser;
317   CHKERRQ(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
318   CHKERRQ(DMGetCoordinateDM(da, &coordDA));
319   CHKERRQ(DMGetCoordinates(da, &coordinates));
320   CHKERRQ(DMDAVecGetArray(coordDA, coordinates, &coords));
321   CHKERRQ(DMDAVecGetArray(da, U, &u));
322   for (j = ys; j < ys+ym; ++j) {
323     for (i = xs; i < xs+xm; ++i) {
324       user->mms_solution(user,&coords[j][i],&u[j][i]);
325     }
326   }
327   CHKERRQ(DMDAVecRestoreArray(da, U, &u));
328   CHKERRQ(DMDAVecRestoreArray(coordDA, coordinates, &coords));
329   PetscFunctionReturn(0);
330 }
331 
332 PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
333 {
334   u[0] = 0.;
335   return 0;
336 }
337 
338 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
339 
340      f(x,y) = -u_xx - u_yy - lambda exp(u)
341 
342   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
343  */
344 PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
345 {
346   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
347   u[0] = x*(1 - x)*y*(1 - y);
348   PetscLogFlops(5);
349   return 0;
350 }
351 PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
352 {
353   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
354   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
355   return 0;
356 }
357 
358 PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
359 {
360   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
361   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
362   PetscLogFlops(5);
363   return 0;
364 }
365 PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
366 {
367   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
368   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
369   return 0;
370 }
371 
372 PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
373 {
374   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
375   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
376   PetscLogFlops(5);
377   return 0;
378 }
/* Forcing for MMS 3: f = -u_xx - u_yy - lambda exp(u) for
   u = sin(m pi x (1-y)) sin(n pi y (1-x)).  The expression below is the
   symbolically expanded Laplacian; it is kept verbatim because its term
   ordering matches the derivation it was generated from. */
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  /* -lambda exp(u) term, then the expanded -Laplacian grouped by pi^2 */
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
389 
390 PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
391 {
392   const PetscReal Lx = 1.,Ly = 1.;
393   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
394   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
395   PetscLogFlops(9);
396   return 0;
397 }
/* Forcing for MMS 4: f = -u_xx - u_yy - lambda exp(u) for the quartic
   solution of MMSSolution4.  Kept verbatim: the two polynomial terms are the
   expanded second derivatives in x and y respectively. */
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
407 
408 /* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch.

   Boundary rows are scaled residuals enforcing u = mms_solution (zero for the
   plain Bratu run via ZeroBCSolution); interior rows are the standard 5-point
   finite-difference discretization of -Laplacian u - lambda exp(u) - f.
 */
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;   /* scaling of the y-direction second difference */
  hydhx  = hy/hx;   /* scaling of the x-direction second difference */
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary row: residual is (u - exact) scaled like a diagonal entry
           so boundary and interior rows have comparable magnitude. */
        c.x = i*hx; c.y = j*hy;
        CHKERRQ(user->mms_solution(user,&c,&mms_solution));
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; CHKERRQ(user->mms_solution(user,&c,&uw));}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; CHKERRQ(user->mms_solution(user,&c,&ue));}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; CHKERRQ(user->mms_solution(user,&c,&un));}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; CHKERRQ(user->mms_solution(user,&c,&us));}

        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        /* mms_forcing is NULL for the plain Bratu run (no manufactured forcing) */
        if (user->mms_forcing) CHKERRQ(user->mms_forcing(user,&c,&mms_forcing));
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  CHKERRQ(PetscLogFlops(11.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}
460 
/* FormObjectiveLocal - Evaluates the objective functional on the local process
   patch and sums it across ranks; registered via DMDASNESSetObjectiveLocal
   when -obj is given. */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;  /* lobj: this rank's partial sum */
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  CHKERRQ(PetscObjectGetComm((PetscObject)info->da,&comm));
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;   /* cell-scaled nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary contribution: quadratic penalty matching the scaled
           boundary residual used in FormFunctionLocal */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Neighbors on the physical boundary are treated as zero Dirichlet */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  CHKERRQ(PetscLogFlops(12.0*info->ym*info->xm));
  /* Combine the per-rank partial sums into the global objective value */
  CHKERRMPI(MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm));
  PetscFunctionReturn(0);
}
510 
511 /*
512    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
513 */
514 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
515 {
516   PetscInt       i,j,k;
517   MatStencil     col[5],row;
518   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
519   DM             coordDA;
520   Vec            coordinates;
521   DMDACoor2d   **coords;
522 
523   PetscFunctionBeginUser;
524   lambda = user->param;
525   /* Extract coordinates */
526   CHKERRQ(DMGetCoordinateDM(info->da, &coordDA));
527   CHKERRQ(DMGetCoordinates(info->da, &coordinates));
528   CHKERRQ(DMDAVecGetArray(coordDA, coordinates, &coords));
529   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
530   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
531   CHKERRQ(DMDAVecRestoreArray(coordDA, coordinates, &coords));
532   hxdhy  = hx/hy;
533   hydhx  = hy/hx;
534   sc     = hx*hy*lambda;
535 
536   /*
537      Compute entries for the locally owned part of the Jacobian.
538       - Currently, all PETSc parallel matrix formats are partitioned by
539         contiguous chunks of rows across the processors.
540       - Each processor needs to insert only elements that it owns
541         locally (but any non-local elements will be sent to the
542         appropriate processor during matrix assembly).
543       - Here, we set all entries for a particular row at once.
544       - We can set matrix entries either using either
545         MatSetValuesLocal() or MatSetValues(), as discussed above.
546   */
547   for (j=info->ys; j<info->ys+info->ym; j++) {
548     for (i=info->xs; i<info->xs+info->xm; i++) {
549       row.j = j; row.i = i;
550       /* boundary points */
551       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
552         v[0] =  2.0*(hydhx + hxdhy);
553         CHKERRQ(MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES));
554       } else {
555         k = 0;
556         /* interior grid points */
557         if (j-1 != 0) {
558           v[k]     = -hxdhy;
559           col[k].j = j - 1; col[k].i = i;
560           k++;
561         }
562         if (i-1 != 0) {
563           v[k]     = -hydhx;
564           col[k].j = j;     col[k].i = i-1;
565           k++;
566         }
567 
568         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
569 
570         if (i+1 != info->mx-1) {
571           v[k]     = -hydhx;
572           col[k].j = j;     col[k].i = i+1;
573           k++;
574         }
575         if (j+1 != info->mx-1) {
576           v[k]     = -hxdhy;
577           col[k].j = j + 1; col[k].i = i;
578           k++;
579         }
580         CHKERRQ(MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES));
581       }
582     }
583   }
584 
585   /*
586      Assemble matrix, using the 2-step process:
587        MatAssemblyBegin(), MatAssemblyEnd().
588   */
589   CHKERRQ(MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY));
590   CHKERRQ(MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY));
591   /*
592      Tell the matrix we will never add a new nonzero location to the
593      matrix. If we do, it will generate an error.
594   */
595   CHKERRQ(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
596   PetscFunctionReturn(0);
597 }
598 
/* FormFunctionMatlab - Residual evaluation delegated to the MATLAB engine
   (script ex5m); registered as the SNES function when -matlab_function is
   given and PETSc was built with MATLAB support.  Without MATLAB support the
   body compiles to a stub that is never called. */
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  CHKERRQ(SNESGetDM(snes,&da));
  CHKERRQ(DMGetLocalVector(da,&localX));
  CHKERRQ(DMGetLocalVector(da,&localF));
  /* Names are how the MATLAB workspace refers to these vectors */
  CHKERRQ(PetscObjectSetName((PetscObject)localX,"localX"));
  CHKERRQ(PetscObjectSetName((PetscObject)localF,"localF"));
  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  CHKERRQ(PetscObjectGetComm((PetscObject)snes,&comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  CHKERRQ(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
  /* Ship localX to MATLAB, evaluate ex5m there, and pull back localF */
  CHKERRQ(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX));
  CHKERRQ(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda));
  CHKERRQ(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF));

  /*
     Insert values into global vector
  */
  CHKERRQ(DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F));
  CHKERRQ(DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F));
  CHKERRQ(DMRestoreLocalVector(da,&localX));
  CHKERRQ(DMRestoreLocalVector(da,&localF));
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
646 
/* ------------------------------------------------------------------- */
/*
      NonlinearGS - Applies sweeps of pointwise nonlinear Gauss-Seidel on each
      process; registered with SNESSetNGS().  At every interior node a scalar
      Newton iteration (up to `its` steps, with atol/rtol/stol stopping tests)
      solves the local 5-point residual equation for that node's value.
 */
PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
{
  PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
  PetscReal      atol,rtol,stol;
  DM             da;
  AppCtx         *user;
  Vec            localX,localB;

  PetscFunctionBeginUser;
  tot_its = 0;   /* total pointwise Newton steps, used for flop logging */
  CHKERRQ(SNESNGSGetSweeps(snes,&sweeps));
  CHKERRQ(SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its));
  CHKERRQ(SNESGetDM(snes,&da));
  CHKERRQ(DMGetApplicationContext(da,&user));

  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  sc     = hx*hy*lambda;   /* cell-scaled nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;

  CHKERRQ(DMGetLocalVector(da,&localX));
  if (B) {
    CHKERRQ(DMGetLocalVector(da,&localB));
  }
  for (l=0; l<sweeps; l++) {

    /* Refresh ghost values from the current global state each sweep */
    CHKERRQ(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
    CHKERRQ(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
    if (B) {
      CHKERRQ(DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB));
      CHKERRQ(DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB));
    }
    /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
     the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
     the array.
     */
    CHKERRQ(DMDAVecGetArray(da,localX,&x));
    /* NOTE(review): localB's array is acquired here every sweep but restored
       only once after the loop (below) -- looks unbalanced for sweeps > 1;
       confirm against DMDAVecGetArray pairing rules. */
    if (B) CHKERRQ(DMDAVecGetArray(da,localB,&b));
    /*
     Get local grid boundaries (for 2-dimensional DMDA):
     xs, ys   - starting grid indices (no ghost points)
     xm, ym   - widths of local grid (no ghost points)
     */
    CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          /* bij: right-hand-side value for this node (zero if no B given) */
          if (B) bij = b[j][i];
          else   bij = 0.;

          u  = x[j][i];
          un = x[j-1][i];
          us = x[j+1][i];
          ue = x[j][i-1];
          uw = x[j][i+1];

          /* Scalar Newton iteration on the local residual in u */
          for (k=0; k<its; k++) {
            eu  = PetscExpScalar(u);
            uxx = (2.0*u - ue - uw)*hydhx;
            uyy = (2.0*u - un - us)*hxdhy;
            F   = uxx + uyy - sc*eu - bij;
            if (k == 0) F0 = F;   /* initial residual, for the rtol test */
            J  = 2.0*(hydhx + hxdhy) - sc*eu;   /* dF/du */
            y  = F/J;
            u -= y;
            tot_its++;

            /* Stop on absolute residual, relative residual, or small update */
            if (atol > PetscAbsReal(PetscRealPart(F)) ||
                rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
                stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
              break;
            }
          }
          x[j][i] = u;
        }
      }
    }
    /*
     Restore vector
     */
    CHKERRQ(DMDAVecRestoreArray(da,localX,&x));
    CHKERRQ(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
    CHKERRQ(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
  }
  /* ~21 flops per pointwise Newton step */
  CHKERRQ(PetscLogFlops(tot_its*(21.0)));
  CHKERRQ(DMRestoreLocalVector(da,&localX));
  if (B) {
    CHKERRQ(DMDAVecRestoreArray(da,localB,&b));
    CHKERRQ(DMRestoreLocalVector(da,&localB));
  }
  PetscFunctionReturn(0);
}
757 
758 /*TEST
759 
760    test:
761      suffix: asm_0
762      requires: !single
763      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
764 
765    test:
766      suffix: msm_0
767      requires: !single
768      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
769 
770    test:
771      suffix: asm_1
772      requires: !single
773      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
774 
775    test:
776      suffix: msm_1
777      requires: !single
778      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
779 
780    test:
781      suffix: asm_2
782      requires: !single
783      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
784 
785    test:
786      suffix: msm_2
787      requires: !single
788      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
789 
790    test:
791      suffix: asm_3
792      requires: !single
793      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
794 
795    test:
796      suffix: msm_3
797      requires: !single
798      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
799 
800    test:
801      suffix: asm_4
802      requires: !single
803      nsize: 2
804      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
805 
806    test:
807      suffix: msm_4
808      requires: !single
809      nsize: 2
810      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
811 
812    test:
813      suffix: asm_5
814      requires: !single
815      nsize: 2
816      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
817 
818    test:
819      suffix: msm_5
820      requires: !single
821      nsize: 2
822      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
823 
824    test:
825      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
826      requires: !single
827 
828    test:
829      suffix: 2
830      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
831 
832    test:
833      suffix: 3
834      nsize: 2
835      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
836      filter: grep -v "otal number of function evaluations"
837 
838    test:
839      suffix: 4
840      nsize: 2
841      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
842 
843    test:
844      suffix: 5_anderson
845      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
846 
847    test:
848      suffix: 5_aspin
849      nsize: 4
850      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
851 
852    test:
853      suffix: 5_broyden
854      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
855 
856    test:
857      suffix: 5_fas
858      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
859      requires: !single
860 
861    test:
862      suffix: 5_fas_additive
863      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
864 
865    test:
866      suffix: 5_fas_monitor
867      args: -da_refine 1 -snes_type fas -snes_fas_monitor
868      requires: !single
869 
870    test:
871      suffix: 5_ls
872      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
873 
874    test:
875      suffix: 5_ls_sell_sor
876      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
877      output_file: output/ex5_5_ls.out
878 
879    test:
880      suffix: 5_nasm
881      nsize: 4
882      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
883 
884    test:
885      suffix: 5_ncg
886      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
887 
888    test:
889      suffix: 5_newton_asm_dmda
890      nsize: 4
891      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
892      requires: !single
893 
894    test:
895      suffix: 5_newton_gasm_dmda
896      nsize: 4
897      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
898      requires: !single
899 
900    test:
901      suffix: 5_ngmres
902      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
903 
904    test:
905      suffix: 5_ngmres_fas
906      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
907 
908    test:
909      suffix: 5_ngmres_ngs
910      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
911 
912    test:
913      suffix: 5_ngmres_nrichardson
914      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
915      output_file: output/ex5_5_ngmres_richardson.out
916 
917    test:
918      suffix: 5_nrichardson
919      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
920 
921    test:
922      suffix: 5_qn
923      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
924 
925    test:
926      suffix: 6
927      nsize: 4
928      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
929 
930    test:
931      requires: complex !single
932      suffix: complex
933      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
934 
935 TEST*/
936