xref: /petsc/src/snes/tutorials/ex5.c (revision 49762cbcca70e28a57427f7ecb626be5a9c34874)
/* Runtime help text printed by -help; documents the problem-specific command line options. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /*T
11    Concepts: SNES^parallel Bratu example
12    Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
14    Processors: n
15 T*/
16 
17 /* ------------------------------------------------------------------------
18 
19     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
20     the partial differential equation
21 
22             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
23 
24     with boundary conditions
25 
26              u = 0  for  x = 0, x = 1, y = 0, y = 1.
27 
28     A finite difference approximation with the usual 5-point stencil
29     is used to discretize the boundary value problem to obtain a nonlinear
30     system of equations.
31 
32       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
33       as SNESSetDM() is provided. Example usage
34 
35       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
36              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
37 
38       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
39          multigrid levels, it will be determined automatically based on the number of refinements done)
40 
41       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
42              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
43 
44   ------------------------------------------------------------------------- */
45 
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that these
   headers automatically include the lower-level PETSc headers they require.
*/
50 #include <petscdm.h>
51 #include <petscdmda.h>
52 #include <petscsnes.h>
53 #include <petscmatlab.h>
54 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
55 
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* Bratu nonlinearity parameter lambda (test problem parameter) */
  PetscInt  m,n;            /* MMS3 frequencies, set to 2^m_par and 2^n_par in main() */
  /* Evaluates the exact MMS solution at one coordinate; also supplies boundary values. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Evaluates the MMS forcing term at one coordinate; NULL when no forcing is used. */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
68 
69 /*
70    User-defined routines
71 */
72 extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
73 extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
74 extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
75 extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
76 extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
77 extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
78 extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
79 extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
80 extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
81 extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
82 extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
83 extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
84 extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
85 extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
86 extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
87 extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
88 
89 int main(int argc,char **argv)
90 {
91   SNES           snes;                         /* nonlinear solver */
92   Vec            x;                            /* solution vector */
93   AppCtx         user;                         /* user-defined work context */
94   PetscInt       its;                          /* iterations for convergence */
95   PetscReal      bratu_lambda_max = 6.81;
96   PetscReal      bratu_lambda_min = 0.;
97   PetscInt       MMS              = 0;
98   PetscBool      flg              = PETSC_FALSE;
99   DM             da;
100   Vec            r               = NULL;
101   KSP            ksp;
102   PetscInt       lits,slits;
103 
104   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
105      Initialize program
106      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
107 
108   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
109 
110   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
111      Initialize problem parameters
112   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
113   user.param = 6.0;
114   PetscCall(PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL));
115   PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
116   PetscCall(PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL));
117   if (MMS == 3) {
118     PetscInt mPar = 2, nPar = 1;
119     PetscCall(PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL));
120     PetscCall(PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL));
121     user.m = PetscPowInt(2,mPar);
122     user.n = PetscPowInt(2,nPar);
123   }
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126      Create nonlinear solver context
127      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));
129   PetscCall(SNESSetCountersReset(snes,PETSC_FALSE));
130   PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));
131 
132   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
133      Create distributed array (DMDA) to manage parallel grid and vectors
134   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
135   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
136   PetscCall(DMSetFromOptions(da));
137   PetscCall(DMSetUp(da));
138   PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
139   PetscCall(DMSetApplicationContext(da,&user));
140   PetscCall(SNESSetDM(snes,da));
141   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
142      Extract global vectors from DMDA; then duplicate for remaining
143      vectors that are the same types
144    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
145   PetscCall(DMCreateGlobalVector(da,&x));
146 
147   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
148      Set local function evaluation routine
149   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
150   user.mms_solution = ZeroBCSolution;
151   switch (MMS) {
152   case 0: user.mms_solution = NULL; user.mms_forcing = NULL;
153   case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
154   case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
155   case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
156   case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
157   default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
158   }
159   PetscCall(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user));
160   PetscCall(PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL));
161   if (!flg) {
162     PetscCall(DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user));
163   }
164 
165   PetscCall(PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL));
166   if (flg) {
167     PetscCall(DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user));
168   }
169 
170   if (PetscDefined(HAVE_MATLAB_ENGINE)) {
171     PetscBool matlab_function = PETSC_FALSE;
172     PetscCall(PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0));
173     if (matlab_function) {
174       PetscCall(VecDuplicate(x,&r));
175       PetscCall(SNESSetFunction(snes,r,FormFunctionMatlab,&user));
176     }
177   }
178 
179   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
180      Customize nonlinear solver; set runtime options
181    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
182   PetscCall(SNESSetFromOptions(snes));
183 
184   PetscCall(FormInitialGuess(da,&user,x));
185 
186   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
187      Solve nonlinear system
188      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
189   PetscCall(SNESSolve(snes,NULL,x));
190   PetscCall(SNESGetIterationNumber(snes,&its));
191 
192   PetscCall(SNESGetLinearSolveIterations(snes,&slits));
193   PetscCall(SNESGetKSP(snes,&ksp));
194   PetscCall(KSPGetTotalIterations(ksp,&lits));
195   PetscCheck(lits == slits,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
196   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
197    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
198 
199   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
200      If using MMS, check the l_2 error
201    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
202   if (MMS) {
203     Vec       e;
204     PetscReal errorl2, errorinf;
205     PetscInt  N;
206 
207     PetscCall(VecDuplicate(x, &e));
208     PetscCall(PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view"));
209     PetscCall(FormExactSolution(da, &user, e));
210     PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view"));
211     PetscCall(VecAXPY(e, -1.0, x));
212     PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view"));
213     PetscCall(VecNorm(e, NORM_2, &errorl2));
214     PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
215     PetscCall(VecGetSize(e, &N));
216     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal((PetscReal)N), (double) errorinf));
217     PetscCall(VecDestroy(&e));
218     PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
219     PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N)));
220   }
221 
222   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
223      Free work space.  All PETSc objects should be destroyed when they
224      are no longer needed.
225    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
226   PetscCall(VecDestroy(&r));
227   PetscCall(VecDestroy(&x));
228   PetscCall(SNESDestroy(&snes));
229   PetscCall(DMDestroy(&da));
230   PetscCall(PetscFinalize());
231   return 0;
232 }
233 /* ------------------------------------------------------------------- */
234 /*
235    FormInitialGuess - Forms initial approximation.
236 
237    Input Parameters:
238    da - The DM
239    user - user-defined application context
240 
241    Output Parameter:
242    X - vector
243  */
244 PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
245 {
246   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
247   PetscReal      lambda,temp1,temp,hx,hy;
248   PetscScalar    **x;
249 
250   PetscFunctionBeginUser;
251   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
252 
253   lambda = user->param;
254   hx     = 1.0/(PetscReal)(Mx-1);
255   hy     = 1.0/(PetscReal)(My-1);
256   temp1  = lambda/(lambda + 1.0);
257 
258   /*
259      Get a pointer to vector data.
260        - For default PETSc vectors, VecGetArray() returns a pointer to
261          the data array.  Otherwise, the routine is implementation dependent.
262        - You MUST call VecRestoreArray() when you no longer need access to
263          the array.
264   */
265   PetscCall(DMDAVecGetArray(da,X,&x));
266 
267   /*
268      Get local grid boundaries (for 2-dimensional DMDA):
269        xs, ys   - starting grid indices (no ghost points)
270        xm, ym   - widths of local grid (no ghost points)
271 
272   */
273   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
274 
275   /*
276      Compute initial guess over the locally owned part of the grid
277   */
278   for (j=ys; j<ys+ym; j++) {
279     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
280     for (i=xs; i<xs+xm; i++) {
281       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
282         /* boundary conditions are all zero Dirichlet */
283         x[j][i] = 0.0;
284       } else {
285         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
286       }
287     }
288   }
289 
290   /*
291      Restore vector
292   */
293   PetscCall(DMDAVecRestoreArray(da,X,&x));
294   PetscFunctionReturn(0);
295 }
296 
297 /*
298   FormExactSolution - Forms MMS solution
299 
300   Input Parameters:
301   da - The DM
302   user - user-defined application context
303 
304   Output Parameter:
305   X - vector
306  */
307 PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
308 {
309   DM             coordDA;
310   Vec            coordinates;
311   DMDACoor2d   **coords;
312   PetscScalar  **u;
313   PetscInt       xs, ys, xm, ym, i, j;
314 
315   PetscFunctionBeginUser;
316   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
317   PetscCall(DMGetCoordinateDM(da, &coordDA));
318   PetscCall(DMGetCoordinates(da, &coordinates));
319   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
320   PetscCall(DMDAVecGetArray(da, U, &u));
321   for (j = ys; j < ys+ym; ++j) {
322     for (i = xs; i < xs+xm; ++i) {
323       user->mms_solution(user,&coords[j][i],&u[j][i]);
324     }
325   }
326   PetscCall(DMDAVecRestoreArray(da, U, &u));
327   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
328   PetscFunctionReturn(0);
329 }
330 
331 PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
332 {
333   u[0] = 0.;
334   return 0;
335 }
336 
337 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
338 
339      f(x,y) = -u_xx - u_yy - lambda exp(u)
340 
341   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
342  */
343 PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
344 {
345   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
346   u[0] = x*(1 - x)*y*(1 - y);
347   PetscLogFlops(5);
348   return 0;
349 }
350 PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
351 {
352   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
353   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
354   return 0;
355 }
356 
357 PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
358 {
359   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
360   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
361   PetscLogFlops(5);
362   return 0;
363 }
364 PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
365 {
366   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
367   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
368   return 0;
369 }
370 
371 PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
372 {
373   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
374   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
375   PetscLogFlops(5);
376   return 0;
377 }
/* Forcing term consistent with MMSSolution3, i.e. f = -u_xx - u_yy - lambda exp(u)
   for u = sin(m pi x (1-y)) sin(n pi y (1-x)).  The closed form below was derived
   symbolically; (1-x) and (1-y) appear as (-1+x), (-1+y) with compensating signs. */
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;   /* note: m,n converted PetscInt -> PetscReal */
  /* First line: -lambda*exp(u); remaining lines: -Laplacian of u (mixed cosine
     cross term plus sine terms carrying the squared frequency factors). */
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
388 
389 PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
390 {
391   const PetscReal Lx = 1.,Ly = 1.;
392   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
393   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
394   PetscLogFlops(9);
395   return 0;
396 }
/* Forcing term consistent with MMSSolution4: f = -u_xx - u_yy - lambda exp(u)
   with u = (x^4 - Lx^2 x^2)(y^4 - Ly^2 y^2); the polynomial lines are the
   negated second derivatives derived symbolically. */
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
406 
/* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates the nonlinear residual F(x) on the local process patch
   using the standard 5-point finite-difference stencil.  Boundary rows enforce the
   Dirichlet data supplied by user->mms_solution (zero for the plain Bratu run).
 */
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);   /* uniform grid spacing on [0,1] */
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;                         /* stencil scale factors */
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary row: scaled difference from the prescribed boundary value,
           using the same diagonal scale as the interior so the system stays balanced. */
        c.x = i*hx; c.y = j*hy;
        PetscCall(user->mms_solution(user,&c,&mms_solution));
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&uw));}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&ue));}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; PetscCall(user->mms_solution(user,&c,&un));}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; PetscCall(user->mms_solution(user,&c,&us));}

        /* Scaled second differences (the residual is the FD form of -Laplacian u). */
        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        if (user->mms_forcing) PetscCall(user->mms_forcing(user,&c,&mms_forcing));
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscCall(PetscLogFlops(11.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}
459 
/* FormObjectiveLocal - Evaluates the scalar objective functional associated with the
   residual on the local process patch, then sums over all processes.
   Roughly F[u] = 1/2 int grad(u).grad(u) - lambda exp(u) dx in discrete form. */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;   /* lobj accumulates the local contribution */
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  PetscCall(PetscObjectGetComm((PetscObject)info->da,&comm));
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;                /* cell-scaled nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* NOTE(review): boundary term uses x^2 directly, i.e. zero Dirichlet data;
           this matches the residual only for the plain (MMS 0) run -- confirm if
           -obj is combined with -mms > 0. */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Zero Dirichlet values at neighbors on the boundary, mirroring FormFunctionLocal. */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  PetscCall(PetscLogFlops(12.0*info->ym*info->xm));
  /* Sum the per-process contributions into the global objective value. */
  PetscCallMPI(MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm));
  PetscFunctionReturn(0);
}
509 
510 /*
511    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
512 */
513 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
514 {
515   PetscInt       i,j,k;
516   MatStencil     col[5],row;
517   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
518   DM             coordDA;
519   Vec            coordinates;
520   DMDACoor2d   **coords;
521 
522   PetscFunctionBeginUser;
523   lambda = user->param;
524   /* Extract coordinates */
525   PetscCall(DMGetCoordinateDM(info->da, &coordDA));
526   PetscCall(DMGetCoordinates(info->da, &coordinates));
527   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
528   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
529   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
530   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
531   hxdhy  = hx/hy;
532   hydhx  = hy/hx;
533   sc     = hx*hy*lambda;
534 
535   /*
536      Compute entries for the locally owned part of the Jacobian.
537       - Currently, all PETSc parallel matrix formats are partitioned by
538         contiguous chunks of rows across the processors.
539       - Each processor needs to insert only elements that it owns
540         locally (but any non-local elements will be sent to the
541         appropriate processor during matrix assembly).
542       - Here, we set all entries for a particular row at once.
543       - We can set matrix entries either using either
544         MatSetValuesLocal() or MatSetValues(), as discussed above.
545   */
546   for (j=info->ys; j<info->ys+info->ym; j++) {
547     for (i=info->xs; i<info->xs+info->xm; i++) {
548       row.j = j; row.i = i;
549       /* boundary points */
550       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
551         v[0] =  2.0*(hydhx + hxdhy);
552         PetscCall(MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES));
553       } else {
554         k = 0;
555         /* interior grid points */
556         if (j-1 != 0) {
557           v[k]     = -hxdhy;
558           col[k].j = j - 1; col[k].i = i;
559           k++;
560         }
561         if (i-1 != 0) {
562           v[k]     = -hydhx;
563           col[k].j = j;     col[k].i = i-1;
564           k++;
565         }
566 
567         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
568 
569         if (i+1 != info->mx-1) {
570           v[k]     = -hydhx;
571           col[k].j = j;     col[k].i = i+1;
572           k++;
573         }
574         if (j+1 != info->mx-1) {
575           v[k]     = -hxdhy;
576           col[k].j = j + 1; col[k].i = i;
577           k++;
578         }
579         PetscCall(MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES));
580       }
581     }
582   }
583 
584   /*
585      Assemble matrix, using the 2-step process:
586        MatAssemblyBegin(), MatAssemblyEnd().
587   */
588   PetscCall(MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY));
589   PetscCall(MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY));
590   /*
591      Tell the matrix we will never add a new nonzero location to the
592      matrix. If we do, it will generate an error.
593   */
594   PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
595   PetscFunctionReturn(0);
596 }
597 
/*
   FormFunctionMatlab - Evaluates the nonlinear residual by handing the ghosted local
   vector to a MATLAB engine and calling the script ex5m (must be on MATLAB's path).
   Compiled to a no-op stub when PETSc was built without the MATLAB engine.
*/
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMGetLocalVector(da,&localF));
  /* Names must match the variables referenced in the MATLAB expression below. */
  PetscCall(PetscObjectSetName((PetscObject)localX,"localX"));
  PetscCall(PetscObjectSetName((PetscObject)localF,"localF"));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscCall(PetscObjectGetComm((PetscObject)snes,&comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
  /* Ship localX to MATLAB, evaluate ex5m there, and pull the result back as localF. */
  PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX));
  PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda));
  PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF));

  /*
     Insert values into global vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F));
  PetscCall(DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscCall(DMRestoreLocalVector(da,&localF));
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
645 
/* ------------------------------------------------------------------- */
/*
      Applies some sweeps of nonlinear Gauss-Seidel on each process:
      for every locally owned interior point, a scalar Newton iteration
      (up to `its` steps) solves the pointwise residual equation, then the
      updated local vector is pushed back to the global vector after each sweep.
 */
PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
{
  PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
  PetscReal      atol,rtol,stol;
  DM             da;
  AppCtx         *user;
  Vec            localX,localB;

  PetscFunctionBeginUser;
  tot_its = 0;                       /* counts all pointwise Newton steps for flop logging */
  PetscCall(SNESNGSGetSweeps(snes,&sweeps));
  PetscCall(SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its));
  PetscCall(SNESGetDM(snes,&da));
  PetscCall(DMGetApplicationContext(da,&user));

  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  sc     = hx*hy*lambda;             /* cell-scaled nonlinearity coefficient */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;

  PetscCall(DMGetLocalVector(da,&localX));
  if (B) {
    PetscCall(DMGetLocalVector(da,&localB));
  }
  for (l=0; l<sweeps; l++) {

    PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
    PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
    if (B) {
      PetscCall(DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB));
      PetscCall(DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB));
    }
    /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
     the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
     the array.
     */
    PetscCall(DMDAVecGetArray(da,localX,&x));
    if (B) PetscCall(DMDAVecGetArray(da,localB,&b));
    /*
     Get local grid boundaries (for 2-dimensional DMDA):
     xs, ys   - starting grid indices (no ghost points)
     xm, ym   - widths of local grid (no ghost points)
     */
    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          if (B) bij = b[j][i];      /* optional right-hand side contribution */
          else   bij = 0.;

          u  = x[j][i];
          un = x[j-1][i];
          us = x[j+1][i];
          /* NOTE(review): ue/uw are assigned the i-1 / i+1 neighbors respectively,
             i.e. the names look swapped relative to east/west; harmless because
             they only appear symmetrically in (2u - ue - uw). */
          ue = x[j][i-1];
          uw = x[j][i+1];

          /* Pointwise Newton iteration on the scalar residual at (i,j). */
          for (k=0; k<its; k++) {
            eu  = PetscExpScalar(u);
            uxx = (2.0*u - ue - uw)*hydhx;
            uyy = (2.0*u - un - us)*hxdhy;
            F   = uxx + uyy - sc*eu - bij;     /* residual */
            if (k == 0) F0 = F;                /* first residual, for the relative test */
            J  = 2.0*(hydhx + hxdhy) - sc*eu;  /* dF/du */
            y  = F/J;                          /* Newton step */
            u -= y;
            tot_its++;

            /* Stop when the residual meets the absolute/relative tolerance or
               the step is small relative to the iterate. */
            if (atol > PetscAbsReal(PetscRealPart(F)) ||
                rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
                stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
              break;
            }
          }
          x[j][i] = u;
        }
      }
    }
    /*
     Restore vector
     */
    PetscCall(DMDAVecRestoreArray(da,localX,&x));
    PetscCall(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
    PetscCall(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
  }
  PetscCall(PetscLogFlops(tot_its*(21.0)));
  PetscCall(DMRestoreLocalVector(da,&localX));
  if (B) {
    PetscCall(DMDAVecRestoreArray(da,localB,&b));
    PetscCall(DMRestoreLocalVector(da,&localB));
  }
  PetscFunctionReturn(0);
}
756 
757 /*TEST
758 
759    test:
760      suffix: asm_0
761      requires: !single
762      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
763 
764    test:
765      suffix: msm_0
766      requires: !single
767      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
768 
769    test:
770      suffix: asm_1
771      requires: !single
772      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
773 
774    test:
775      suffix: msm_1
776      requires: !single
777      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
778 
779    test:
780      suffix: asm_2
781      requires: !single
782      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
783 
784    test:
785      suffix: msm_2
786      requires: !single
787      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
788 
789    test:
790      suffix: asm_3
791      requires: !single
792      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
793 
794    test:
795      suffix: msm_3
796      requires: !single
797      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
798 
799    test:
800      suffix: asm_4
801      requires: !single
802      nsize: 2
803      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
804 
805    test:
806      suffix: msm_4
807      requires: !single
808      nsize: 2
809      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
810 
811    test:
812      suffix: asm_5
813      requires: !single
814      nsize: 2
815      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
816 
817    test:
818      suffix: msm_5
819      requires: !single
820      nsize: 2
821      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
822 
823    test:
824      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
825      requires: !single
826 
827    test:
828      suffix: 2
829      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
830 
831    test:
832      suffix: 3
833      nsize: 2
834      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
835      filter: grep -v "otal number of function evaluations"
836 
837    test:
838      suffix: 4
839      nsize: 2
840      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
841 
842    test:
843      suffix: 5_anderson
844      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
845 
846    test:
847      suffix: 5_aspin
848      nsize: 4
849      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
850 
851    test:
852      suffix: 5_broyden
853      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
854 
855    test:
856      suffix: 5_fas
857      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
858      requires: !single
859 
860    test:
861      suffix: 5_fas_additive
862      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
863 
864    test:
865      suffix: 5_fas_monitor
866      args: -da_refine 1 -snes_type fas -snes_fas_monitor
867      requires: !single
868 
869    test:
870      suffix: 5_ls
871      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
872 
873    test:
874      suffix: 5_ls_sell_sor
875      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
876      output_file: output/ex5_5_ls.out
877 
878    test:
879      suffix: 5_nasm
880      nsize: 4
881      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
882 
883    test:
884      suffix: 5_ncg
885      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
886 
887    test:
888      suffix: 5_newton_asm_dmda
889      nsize: 4
890      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
891      requires: !single
892 
893    test:
894      suffix: 5_newton_gasm_dmda
895      nsize: 4
896      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
897      requires: !single
898 
899    test:
900      suffix: 5_ngmres
901      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
902 
903    test:
904      suffix: 5_ngmres_fas
905      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
906 
907    test:
908      suffix: 5_ngmres_ngs
909      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
910 
911    test:
912      suffix: 5_ngmres_nrichardson
913      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
914      output_file: output/ex5_5_ngmres_richardson.out
915 
916    test:
917      suffix: 5_nrichardson
918      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
919 
920    test:
921      suffix: 5_qn
922      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
923 
924    test:
925      suffix: 6
926      nsize: 4
927      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
928 
929    test:
930      requires: complex !single
931      suffix: complex
932      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
933 
934 TEST*/
935