xref: /petsc/src/snes/tutorials/ex5.c (revision d0609ced746bc51b019815ca91d747429db24893)
/* Help text printed by the -help option; summarizes the Bratu (SFI) problem
   and the command-line options specific to this example. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /* ------------------------------------------------------------------------
11 
12     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
13     the partial differential equation
14 
15             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
16 
17     with boundary conditions
18 
19              u = 0  for  x = 0, x = 1, y = 0, y = 1.
20 
21     A finite difference approximation with the usual 5-point stencil
22     is used to discretize the boundary value problem to obtain a nonlinear
23     system of equations.
24 
25       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
26       as SNESSetDM() is provided. Example usage
27 
28       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
29              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
30 
31       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
32          multigrid levels, it will be determined automatically based on the number of refinements done)
33 
34       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
35              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
36 
37   ------------------------------------------------------------------------- */
38 
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that
   "petscsnes.h" automatically pulls in the headers for the lower-level
   PETSc objects that SNES depends on (vectors, matrices, KSP, PC).
*/
43 #include <petscdm.h>
44 #include <petscdmda.h>
45 #include <petscsnes.h>
46 #include <petscmatlab.h>
47 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
48 
49 /*
50    User-defined application context - contains data needed by the
51    application-provided call-back routines, FormJacobianLocal() and
52    FormFunctionLocal().
53 */
typedef struct AppCtx AppCtx;
/* User-defined application context passed to all local callbacks. */
struct AppCtx {
  PetscReal param;          /* test problem parameter (Bratu lambda, 0 <= param <= 6.81) */
  PetscInt  m,n;            /* MMS3 parameters (set to 2^m_par and 2^n_par in main) */
  /* Manufactured solution u(x,y); evaluated on boundary points by FormFunctionLocal,
     so it must be non-NULL whenever that callback is in use. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Forcing term f(x,y) for the manufactured solution; may be NULL (no forcing). */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
61 
62 /*
63    User-defined routines
64 */
65 extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
66 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
67 extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
68 extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
69 extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
70 extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
71 extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
72 extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
73 extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
74 extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
75 extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
76 extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
77 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
78 extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
79 extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
80 extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
81 
82 int main(int argc,char **argv)
83 {
84   SNES           snes;                         /* nonlinear solver */
85   Vec            x;                            /* solution vector */
86   AppCtx         user;                         /* user-defined work context */
87   PetscInt       its;                          /* iterations for convergence */
88   PetscReal      bratu_lambda_max = 6.81;
89   PetscReal      bratu_lambda_min = 0.;
90   PetscInt       MMS              = 0;
91   PetscBool      flg              = PETSC_FALSE;
92   DM             da;
93   Vec            r               = NULL;
94   KSP            ksp;
95   PetscInt       lits,slits;
96 
97   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
98      Initialize program
99      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
100 
101   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
102 
103   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
104      Initialize problem parameters
105   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
106   user.param = 6.0;
107   PetscCall(PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL));
108   PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
109   PetscCall(PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL));
110   if (MMS == 3) {
111     PetscInt mPar = 2, nPar = 1;
112     PetscCall(PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL));
113     PetscCall(PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL));
114     user.m = PetscPowInt(2,mPar);
115     user.n = PetscPowInt(2,nPar);
116   }
117 
118   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
119      Create nonlinear solver context
120      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
121   PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));
122   PetscCall(SNESSetCountersReset(snes,PETSC_FALSE));
123   PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126      Create distributed array (DMDA) to manage parallel grid and vectors
127   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
129   PetscCall(DMSetFromOptions(da));
130   PetscCall(DMSetUp(da));
131   PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
132   PetscCall(DMSetApplicationContext(da,&user));
133   PetscCall(SNESSetDM(snes,da));
134   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135      Extract global vectors from DMDA; then duplicate for remaining
136      vectors that are the same types
137    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
138   PetscCall(DMCreateGlobalVector(da,&x));
139 
140   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
141      Set local function evaluation routine
142   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
143   user.mms_solution = ZeroBCSolution;
144   switch (MMS) {
145   case 0: user.mms_solution = NULL; user.mms_forcing = NULL;
146   case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
147   case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
148   case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
149   case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
150   default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
151   }
152   PetscCall(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user));
153   PetscCall(PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL));
154   if (!flg) {
155     PetscCall(DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user));
156   }
157 
158   PetscCall(PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL));
159   if (flg) {
160     PetscCall(DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user));
161   }
162 
163   if (PetscDefined(HAVE_MATLAB_ENGINE)) {
164     PetscBool matlab_function = PETSC_FALSE;
165     PetscCall(PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0));
166     if (matlab_function) {
167       PetscCall(VecDuplicate(x,&r));
168       PetscCall(SNESSetFunction(snes,r,FormFunctionMatlab,&user));
169     }
170   }
171 
172   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173      Customize nonlinear solver; set runtime options
174    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
175   PetscCall(SNESSetFromOptions(snes));
176 
177   PetscCall(FormInitialGuess(da,&user,x));
178 
179   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
180      Solve nonlinear system
181      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
182   PetscCall(SNESSolve(snes,NULL,x));
183   PetscCall(SNESGetIterationNumber(snes,&its));
184 
185   PetscCall(SNESGetLinearSolveIterations(snes,&slits));
186   PetscCall(SNESGetKSP(snes,&ksp));
187   PetscCall(KSPGetTotalIterations(ksp,&lits));
188   PetscCheck(lits == slits,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
189   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
191 
192   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
193      If using MMS, check the l_2 error
194    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
195   if (MMS) {
196     Vec       e;
197     PetscReal errorl2, errorinf;
198     PetscInt  N;
199 
200     PetscCall(VecDuplicate(x, &e));
201     PetscCall(PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view"));
202     PetscCall(FormExactSolution(da, &user, e));
203     PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view"));
204     PetscCall(VecAXPY(e, -1.0, x));
205     PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view"));
206     PetscCall(VecNorm(e, NORM_2, &errorl2));
207     PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
208     PetscCall(VecGetSize(e, &N));
209     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal((PetscReal)N), (double) errorinf));
210     PetscCall(VecDestroy(&e));
211     PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
212     PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N)));
213   }
214 
215   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
216      Free work space.  All PETSc objects should be destroyed when they
217      are no longer needed.
218    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
219   PetscCall(VecDestroy(&r));
220   PetscCall(VecDestroy(&x));
221   PetscCall(SNESDestroy(&snes));
222   PetscCall(DMDestroy(&da));
223   PetscCall(PetscFinalize());
224   return 0;
225 }
226 /* ------------------------------------------------------------------- */
227 /*
228    FormInitialGuess - Forms initial approximation.
229 
230    Input Parameters:
231    da - The DM
232    user - user-defined application context
233 
234    Output Parameter:
235    X - vector
236  */
237 PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
238 {
239   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
240   PetscReal      lambda,temp1,temp,hx,hy;
241   PetscScalar    **x;
242 
243   PetscFunctionBeginUser;
244   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
245 
246   lambda = user->param;
247   hx     = 1.0/(PetscReal)(Mx-1);
248   hy     = 1.0/(PetscReal)(My-1);
249   temp1  = lambda/(lambda + 1.0);
250 
251   /*
252      Get a pointer to vector data.
253        - For default PETSc vectors, VecGetArray() returns a pointer to
254          the data array.  Otherwise, the routine is implementation dependent.
255        - You MUST call VecRestoreArray() when you no longer need access to
256          the array.
257   */
258   PetscCall(DMDAVecGetArray(da,X,&x));
259 
260   /*
261      Get local grid boundaries (for 2-dimensional DMDA):
262        xs, ys   - starting grid indices (no ghost points)
263        xm, ym   - widths of local grid (no ghost points)
264 
265   */
266   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
267 
268   /*
269      Compute initial guess over the locally owned part of the grid
270   */
271   for (j=ys; j<ys+ym; j++) {
272     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
273     for (i=xs; i<xs+xm; i++) {
274       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
275         /* boundary conditions are all zero Dirichlet */
276         x[j][i] = 0.0;
277       } else {
278         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
279       }
280     }
281   }
282 
283   /*
284      Restore vector
285   */
286   PetscCall(DMDAVecRestoreArray(da,X,&x));
287   PetscFunctionReturn(0);
288 }
289 
290 /*
291   FormExactSolution - Forms MMS solution
292 
293   Input Parameters:
294   da - The DM
295   user - user-defined application context
296 
297   Output Parameter:
298   X - vector
299  */
300 PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
301 {
302   DM             coordDA;
303   Vec            coordinates;
304   DMDACoor2d   **coords;
305   PetscScalar  **u;
306   PetscInt       xs, ys, xm, ym, i, j;
307 
308   PetscFunctionBeginUser;
309   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
310   PetscCall(DMGetCoordinateDM(da, &coordDA));
311   PetscCall(DMGetCoordinates(da, &coordinates));
312   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
313   PetscCall(DMDAVecGetArray(da, U, &u));
314   for (j = ys; j < ys+ym; ++j) {
315     for (i = xs; i < xs+xm; ++i) {
316       user->mms_solution(user,&coords[j][i],&u[j][i]);
317     }
318   }
319   PetscCall(DMDAVecRestoreArray(da, U, &u));
320   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
321   PetscFunctionReturn(0);
322 }
323 
324 PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
325 {
326   u[0] = 0.;
327   return 0;
328 }
329 
330 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
331 
332      f(x,y) = -u_xx - u_yy - lambda exp(u)
333 
334   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
335  */
336 PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
337 {
338   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
339   u[0] = x*(1 - x)*y*(1 - y);
340   PetscLogFlops(5);
341   return 0;
342 }
343 PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
344 {
345   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
346   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
347   return 0;
348 }
349 
350 PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
351 {
352   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
353   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
354   PetscLogFlops(5);
355   return 0;
356 }
357 PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
358 {
359   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
360   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
361   return 0;
362 }
363 
364 PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
365 {
366   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
367   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
368   PetscLogFlops(5);
369   return 0;
370 }
/* Forcing term f = -u_xx - u_yy - lambda*exp(u) for MMSSolution3.
   The closed-form expression below was derived symbolically; note the signs
   are expressed via (-1 + x)-style factors, equivalent to (x - 1). */
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  /* -lambda*exp(u) term first, then the Laplacian contribution. */
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
381 
382 PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
383 {
384   const PetscReal Lx = 1.,Ly = 1.;
385   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
386   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
387   PetscLogFlops(9);
388   return 0;
389 }
/* Forcing term f = -u_xx - u_yy - lambda*exp(u) for MMSSolution4. */
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  /* First two terms are -u_xx and -u_yy of the quartic solution;
     the last reproduces the solution inside exp() for -lambda*exp(u). */
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
399 
400 /* ------------------------------------------------------------------- */
401 /*
402    FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch
403 
404  */
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);  /* uniform spacing on the unit square */
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;                        /* 5-point stencil scale factors */
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary row: residual enforces u = mms_solution; note mms_solution
           is dereferenced unconditionally, so it must be set (ZeroBCSolution
           when no MMS is active). */
        c.x = i*hx; c.y = j*hy;
        PetscCall(user->mms_solution(user,&c,&mms_solution));
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&uw));}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&ue));}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; PetscCall(user->mms_solution(user,&c,&un));}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; PetscCall(user->mms_solution(user,&c,&us));}

        /* Scaled negative Laplacian via the 5-point stencil */
        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        /* mms_forcing is optional (NULL when no manufactured forcing is used) */
        if (user->mms_forcing) PetscCall(user->mms_forcing(user,&c,&mms_forcing));
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscCall(PetscLogFlops(11.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}
452 
453 /* FormObjectiveLocal - Evaluates nonlinear function, F(x) on local process patch */
/* FormObjectiveLocal - Evaluates the objective functional associated with the
   residual on the local process patch; the result is summed over all ranks. */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;  /* lobj: rank-local sum */
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  PetscCall(PetscObjectGetComm((PetscObject)info->da,&comm));
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary term: quadratic penalty matching the boundary residual
           in FormFunctionLocal (zero Dirichlet case). */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Neighbors on the physical boundary contribute zero (Dirichlet) */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  PetscCall(PetscLogFlops(12.0*info->ym*info->xm));
  /* Sum the per-rank contributions into the global objective value */
  PetscCallMPI(MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm));
  PetscFunctionReturn(0);
}
502 
503 /*
504    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
505 */
506 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
507 {
508   PetscInt       i,j,k;
509   MatStencil     col[5],row;
510   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
511   DM             coordDA;
512   Vec            coordinates;
513   DMDACoor2d   **coords;
514 
515   PetscFunctionBeginUser;
516   lambda = user->param;
517   /* Extract coordinates */
518   PetscCall(DMGetCoordinateDM(info->da, &coordDA));
519   PetscCall(DMGetCoordinates(info->da, &coordinates));
520   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
521   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
522   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
523   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
524   hxdhy  = hx/hy;
525   hydhx  = hy/hx;
526   sc     = hx*hy*lambda;
527 
528   /*
529      Compute entries for the locally owned part of the Jacobian.
530       - Currently, all PETSc parallel matrix formats are partitioned by
531         contiguous chunks of rows across the processors.
532       - Each processor needs to insert only elements that it owns
533         locally (but any non-local elements will be sent to the
534         appropriate processor during matrix assembly).
535       - Here, we set all entries for a particular row at once.
536       - We can set matrix entries either using either
537         MatSetValuesLocal() or MatSetValues(), as discussed above.
538   */
539   for (j=info->ys; j<info->ys+info->ym; j++) {
540     for (i=info->xs; i<info->xs+info->xm; i++) {
541       row.j = j; row.i = i;
542       /* boundary points */
543       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
544         v[0] =  2.0*(hydhx + hxdhy);
545         PetscCall(MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES));
546       } else {
547         k = 0;
548         /* interior grid points */
549         if (j-1 != 0) {
550           v[k]     = -hxdhy;
551           col[k].j = j - 1; col[k].i = i;
552           k++;
553         }
554         if (i-1 != 0) {
555           v[k]     = -hydhx;
556           col[k].j = j;     col[k].i = i-1;
557           k++;
558         }
559 
560         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
561 
562         if (i+1 != info->mx-1) {
563           v[k]     = -hydhx;
564           col[k].j = j;     col[k].i = i+1;
565           k++;
566         }
567         if (j+1 != info->mx-1) {
568           v[k]     = -hxdhy;
569           col[k].j = j + 1; col[k].i = i;
570           k++;
571         }
572         PetscCall(MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES));
573       }
574     }
575   }
576 
577   /*
578      Assemble matrix, using the 2-step process:
579        MatAssemblyBegin(), MatAssemblyEnd().
580   */
581   PetscCall(MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY));
582   PetscCall(MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY));
583   /*
584      Tell the matrix we will never add a new nonzero location to the
585      matrix. If we do, it will generate an error.
586   */
587   PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
588   PetscFunctionReturn(0);
589 }
590 
/* FormFunctionMatlab - Residual evaluation delegated to the MATLAB script
   ex5m; registered via SNESSetFunction() when -matlab_function is given.
   Compiled to a no-op stub when PETSc lacks the MATLAB engine. */
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMGetLocalVector(da,&localF));
  /* Names matter: the MATLAB side refers to these workspace variables */
  PetscCall(PetscObjectSetName((PetscObject)localX,"localX"));
  PetscCall(PetscObjectSetName((PetscObject)localF,"localF"));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscCall(PetscObjectGetComm((PetscObject)snes,&comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
  /* Ship localX to MATLAB, evaluate the residual there, and fetch localF back */
  PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX));
  PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda));
  PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF));

  /*
     Insert values into global vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F));
  PetscCall(DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscCall(DMRestoreLocalVector(da,&localF));
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
638 
639 /* ------------------------------------------------------------------- */
640 /*
641       Applies some sweeps on nonlinear Gauss-Seidel on each process
642 
643  */
644 PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
645 {
646   PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
647   PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
648   PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
649   PetscReal      atol,rtol,stol;
650   DM             da;
651   AppCtx         *user;
652   Vec            localX,localB;
653 
654   PetscFunctionBeginUser;
655   tot_its = 0;
656   PetscCall(SNESNGSGetSweeps(snes,&sweeps));
657   PetscCall(SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its));
658   PetscCall(SNESGetDM(snes,&da));
659   PetscCall(DMGetApplicationContext(da,&user));
660 
661   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
662 
663   lambda = user->param;
664   hx     = 1.0/(PetscReal)(Mx-1);
665   hy     = 1.0/(PetscReal)(My-1);
666   sc     = hx*hy*lambda;
667   hxdhy  = hx/hy;
668   hydhx  = hy/hx;
669 
670   PetscCall(DMGetLocalVector(da,&localX));
671   if (B) {
672     PetscCall(DMGetLocalVector(da,&localB));
673   }
674   for (l=0; l<sweeps; l++) {
675 
676     PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
677     PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
678     if (B) {
679       PetscCall(DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB));
680       PetscCall(DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB));
681     }
682     /*
683      Get a pointer to vector data.
684      - For default PETSc vectors, VecGetArray() returns a pointer to
685      the data array.  Otherwise, the routine is implementation dependent.
686      - You MUST call VecRestoreArray() when you no longer need access to
687      the array.
688      */
689     PetscCall(DMDAVecGetArray(da,localX,&x));
690     if (B) PetscCall(DMDAVecGetArray(da,localB,&b));
691     /*
692      Get local grid boundaries (for 2-dimensional DMDA):
693      xs, ys   - starting grid indices (no ghost points)
694      xm, ym   - widths of local grid (no ghost points)
695      */
696     PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
697 
698     for (j=ys; j<ys+ym; j++) {
699       for (i=xs; i<xs+xm; i++) {
700         if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
701           /* boundary conditions are all zero Dirichlet */
702           x[j][i] = 0.0;
703         } else {
704           if (B) bij = b[j][i];
705           else   bij = 0.;
706 
707           u  = x[j][i];
708           un = x[j-1][i];
709           us = x[j+1][i];
710           ue = x[j][i-1];
711           uw = x[j][i+1];
712 
713           for (k=0; k<its; k++) {
714             eu  = PetscExpScalar(u);
715             uxx = (2.0*u - ue - uw)*hydhx;
716             uyy = (2.0*u - un - us)*hxdhy;
717             F   = uxx + uyy - sc*eu - bij;
718             if (k == 0) F0 = F;
719             J  = 2.0*(hydhx + hxdhy) - sc*eu;
720             y  = F/J;
721             u -= y;
722             tot_its++;
723 
724             if (atol > PetscAbsReal(PetscRealPart(F)) ||
725                 rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
726                 stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
727               break;
728             }
729           }
730           x[j][i] = u;
731         }
732       }
733     }
734     /*
735      Restore vector
736      */
737     PetscCall(DMDAVecRestoreArray(da,localX,&x));
738     PetscCall(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
739     PetscCall(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
740   }
741   PetscCall(PetscLogFlops(tot_its*(21.0)));
742   PetscCall(DMRestoreLocalVector(da,&localX));
743   if (B) {
744     PetscCall(DMDAVecRestoreArray(da,localB,&b));
745     PetscCall(DMRestoreLocalVector(da,&localB));
746   }
747   PetscFunctionReturn(0);
748 }
749 
750 /*TEST
751 
752    test:
753      suffix: asm_0
754      requires: !single
755      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
756 
757    test:
758      suffix: msm_0
759      requires: !single
760      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
761 
762    test:
763      suffix: asm_1
764      requires: !single
765      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
766 
767    test:
768      suffix: msm_1
769      requires: !single
770      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
771 
772    test:
773      suffix: asm_2
774      requires: !single
775      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
776 
777    test:
778      suffix: msm_2
779      requires: !single
780      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
781 
782    test:
783      suffix: asm_3
784      requires: !single
785      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
786 
787    test:
788      suffix: msm_3
789      requires: !single
790      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
791 
792    test:
793      suffix: asm_4
794      requires: !single
795      nsize: 2
796      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
797 
798    test:
799      suffix: msm_4
800      requires: !single
801      nsize: 2
802      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
803 
804    test:
805      suffix: asm_5
806      requires: !single
807      nsize: 2
808      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
809 
810    test:
811      suffix: msm_5
812      requires: !single
813      nsize: 2
814      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
815 
816    test:
817      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
818      requires: !single
819 
820    test:
821      suffix: 2
822      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
823 
824    test:
825      suffix: 3
826      nsize: 2
827      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
828      filter: grep -v "otal number of function evaluations"
829 
830    test:
831      suffix: 4
832      nsize: 2
833      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
834 
835    test:
836      suffix: 5_anderson
837      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
838 
839    test:
840      suffix: 5_aspin
841      nsize: 4
842      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
843 
844    test:
845      suffix: 5_broyden
846      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
847 
848    test:
849      suffix: 5_fas
850      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
851      requires: !single
852 
853    test:
854      suffix: 5_fas_additive
855      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
856 
857    test:
858      suffix: 5_fas_monitor
859      args: -da_refine 1 -snes_type fas -snes_fas_monitor
860      requires: !single
861 
862    test:
863      suffix: 5_ls
864      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
865 
866    test:
867      suffix: 5_ls_sell_sor
868      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
869      output_file: output/ex5_5_ls.out
870 
871    test:
872      suffix: 5_nasm
873      nsize: 4
874      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
875 
876    test:
877      suffix: 5_ncg
878      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
879 
880    test:
881      suffix: 5_newton_asm_dmda
882      nsize: 4
883      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
884      requires: !single
885 
886    test:
887      suffix: 5_newton_gasm_dmda
888      nsize: 4
889      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
890      requires: !single
891 
892    test:
893      suffix: 5_ngmres
894      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
895 
896    test:
897      suffix: 5_ngmres_fas
898      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
899 
900    test:
901      suffix: 5_ngmres_ngs
902      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
903 
904    test:
905      suffix: 5_ngmres_nrichardson
906      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
907      output_file: output/ex5_5_ngmres_richardson.out
908 
909    test:
910      suffix: 5_nrichardson
911      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
912 
913    test:
914      suffix: 5_qn
915      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
916 
917    test:
918      suffix: 6
919      nsize: 4
920      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
921 
922    test:
923      requires: complex !single
924      suffix: complex
925      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
926 
927 TEST*/
928