xref: /petsc/src/snes/tutorials/ex5.c (revision 2fa40bb9206b96114faa7cb222621ec184d31cd2)
/* Help text printed by PETSc when the program is run with -help. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /*T
11    Concepts: SNES^parallel Bratu example
12    Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
14    Processors: n
15 T*/
16 
17 /* ------------------------------------------------------------------------
18 
19     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
20     the partial differential equation
21 
22             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
23 
24     with boundary conditions
25 
26              u = 0  for  x = 0, x = 1, y = 0, y = 1.
27 
28     A finite difference approximation with the usual 5-point stencil
29     is used to discretize the boundary value problem to obtain a nonlinear
30     system of equations.
31 
32       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
33       as SNESSetDM() is provided. Example usage
34 
35       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
36              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
37 
38       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
39          multigrid levels, it will be determined automatically based on the number of refinements done)
40 
41       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
42              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
43 
44   ------------------------------------------------------------------------- */
45 
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that these
   headers automatically include the lower-level PETSc headers they require.
*/
50 #include <petscdm.h>
51 #include <petscdmda.h>
52 #include <petscsnes.h>
53 #include <petscmatlab.h>
54 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
55 
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* test problem parameter */
  PetscInt  m,n;            /* MMS3 parameters */
  /* Manufactured solution u(x,y); also supplies Dirichlet boundary values.
     FormFunctionLocal() calls this unconditionally, so it must be non-NULL. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Forcing term f(x,y) paired with mms_solution; may be NULL (no forcing). */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
68 
/*
   User-defined routines (implemented later in this file).
   MMSSolution<k>/MMSForcing<k> form manufactured-solution pairs selected
   at run time with -mms <k>.
*/
extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
88 
89 int main(int argc,char **argv)
90 {
91   SNES           snes;                         /* nonlinear solver */
92   Vec            x;                            /* solution vector */
93   AppCtx         user;                         /* user-defined work context */
94   PetscInt       its;                          /* iterations for convergence */
95   PetscErrorCode ierr;
96   PetscReal      bratu_lambda_max = 6.81;
97   PetscReal      bratu_lambda_min = 0.;
98   PetscInt       MMS              = 0;
99   PetscBool      flg              = PETSC_FALSE;
100   DM             da;
101   Vec            r               = NULL;
102   KSP            ksp;
103   PetscInt       lits,slits;
104 
105   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
106      Initialize program
107      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
108 
109   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
110 
111   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
112      Initialize problem parameters
113   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
114   user.param = 6.0;
115   ierr       = PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);CHKERRQ(ierr);
116   if (user.param > bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
117   ierr       = PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL);CHKERRQ(ierr);
118   if (MMS == 3) {
119     PetscInt mPar = 2, nPar = 1;
120     ierr = PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL);CHKERRQ(ierr);
121     ierr = PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL);CHKERRQ(ierr);
122     user.m = PetscPowInt(2,mPar);
123     user.n = PetscPowInt(2,nPar);
124   }
125 
126   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
127      Create nonlinear solver context
128      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
129   ierr = SNESCreate(PETSC_COMM_WORLD,&snes);CHKERRQ(ierr);
130   ierr = SNESSetCountersReset(snes,PETSC_FALSE);CHKERRQ(ierr);
131   ierr = SNESSetNGS(snes, NonlinearGS, NULL);CHKERRQ(ierr);
132 
133   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
134      Create distributed array (DMDA) to manage parallel grid and vectors
135   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
136   ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);CHKERRQ(ierr);
137   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
138   ierr = DMSetUp(da);CHKERRQ(ierr);
139   ierr = DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);CHKERRQ(ierr);
140   ierr = DMSetApplicationContext(da,&user);CHKERRQ(ierr);
141   ierr = SNESSetDM(snes,da);CHKERRQ(ierr);
142   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
143      Extract global vectors from DMDA; then duplicate for remaining
144      vectors that are the same types
145    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
146   ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
147 
148   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149      Set local function evaluation routine
150   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
151   user.mms_solution = ZeroBCSolution;
152   switch (MMS) {
153   case 0: user.mms_solution = NULL; user.mms_forcing = NULL;CHKERRQ(ierr);
154   case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
155   case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
156   case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
157   case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
158   default: SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
159   }
160   ierr = DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user);CHKERRQ(ierr);
161   ierr = PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL);CHKERRQ(ierr);
162   if (!flg) {
163     ierr = DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user);CHKERRQ(ierr);
164   }
165 
166   ierr = PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL);CHKERRQ(ierr);
167   if (flg) {
168     ierr = DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user);CHKERRQ(ierr);
169   }
170 
171   if (PetscDefined(HAVE_MATLAB_ENGINE)) {
172     PetscBool matlab_function = PETSC_FALSE;
173     ierr = PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0);CHKERRQ(ierr);
174     if (matlab_function) {
175       ierr = VecDuplicate(x,&r);CHKERRQ(ierr);
176       ierr = SNESSetFunction(snes,r,FormFunctionMatlab,&user);CHKERRQ(ierr);
177     }
178   }
179 
180   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
181      Customize nonlinear solver; set runtime options
182    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
183   ierr = SNESSetFromOptions(snes);CHKERRQ(ierr);
184 
185   ierr = FormInitialGuess(da,&user,x);CHKERRQ(ierr);
186 
187   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
188      Solve nonlinear system
189      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
190   ierr = SNESSolve(snes,NULL,x);CHKERRQ(ierr);
191   ierr = SNESGetIterationNumber(snes,&its);CHKERRQ(ierr);
192 
193   ierr = SNESGetLinearSolveIterations(snes,&slits);CHKERRQ(ierr);
194   ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr);
195   ierr = KSPGetTotalIterations(ksp,&lits);CHKERRQ(ierr);
196   if (lits != slits) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
197   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
199 
200   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
201      If using MMS, check the l_2 error
202    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
203   if (MMS) {
204     Vec       e;
205     PetscReal errorl2, errorinf;
206     PetscInt  N;
207 
208     ierr = VecDuplicate(x, &e);CHKERRQ(ierr);
209     ierr = PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view");CHKERRQ(ierr);
210     ierr = FormExactSolution(da, &user, e);CHKERRQ(ierr);
211     ierr = PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view");CHKERRQ(ierr);
212     ierr = VecAXPY(e, -1.0, x);CHKERRQ(ierr);
213     ierr = PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view");CHKERRQ(ierr);
214     ierr = VecNorm(e, NORM_2, &errorl2);CHKERRQ(ierr);
215     ierr = VecNorm(e, NORM_INFINITY, &errorinf);CHKERRQ(ierr);
216     ierr = VecGetSize(e, &N);CHKERRQ(ierr);
217     ierr = PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal(N), (double) errorinf);CHKERRQ(ierr);
218     ierr = VecDestroy(&e);CHKERRQ(ierr);
219     ierr = PetscLogEventSetDof(SNES_Solve, 0, N);CHKERRQ(ierr);
220     ierr = PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N));CHKERRQ(ierr);
221   }
222 
223   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
224      Free work space.  All PETSc objects should be destroyed when they
225      are no longer needed.
226    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
227   ierr = VecDestroy(&r);CHKERRQ(ierr);
228   ierr = VecDestroy(&x);CHKERRQ(ierr);
229   ierr = SNESDestroy(&snes);CHKERRQ(ierr);
230   ierr = DMDestroy(&da);CHKERRQ(ierr);
231   ierr = PetscFinalize();
232   return ierr;
233 }
234 /* ------------------------------------------------------------------- */
235 /*
236    FormInitialGuess - Forms initial approximation.
237 
238    Input Parameters:
239    da - The DM
240    user - user-defined application context
241 
242    Output Parameter:
243    X - vector
244  */
245 PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
246 {
247   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
248   PetscErrorCode ierr;
249   PetscReal      lambda,temp1,temp,hx,hy;
250   PetscScalar    **x;
251 
252   PetscFunctionBeginUser;
253   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
254 
255   lambda = user->param;
256   hx     = 1.0/(PetscReal)(Mx-1);
257   hy     = 1.0/(PetscReal)(My-1);
258   temp1  = lambda/(lambda + 1.0);
259 
260   /*
261      Get a pointer to vector data.
262        - For default PETSc vectors, VecGetArray() returns a pointer to
263          the data array.  Otherwise, the routine is implementation dependent.
264        - You MUST call VecRestoreArray() when you no longer need access to
265          the array.
266   */
267   ierr = DMDAVecGetArray(da,X,&x);CHKERRQ(ierr);
268 
269   /*
270      Get local grid boundaries (for 2-dimensional DMDA):
271        xs, ys   - starting grid indices (no ghost points)
272        xm, ym   - widths of local grid (no ghost points)
273 
274   */
275   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
276 
277   /*
278      Compute initial guess over the locally owned part of the grid
279   */
280   for (j=ys; j<ys+ym; j++) {
281     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
282     for (i=xs; i<xs+xm; i++) {
283       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
284         /* boundary conditions are all zero Dirichlet */
285         x[j][i] = 0.0;
286       } else {
287         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
288       }
289     }
290   }
291 
292   /*
293      Restore vector
294   */
295   ierr = DMDAVecRestoreArray(da,X,&x);CHKERRQ(ierr);
296   PetscFunctionReturn(0);
297 }
298 
299 /*
300   FormExactSolution - Forms MMS solution
301 
302   Input Parameters:
303   da - The DM
304   user - user-defined application context
305 
306   Output Parameter:
307   X - vector
308  */
309 PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
310 {
311   DM             coordDA;
312   Vec            coordinates;
313   DMDACoor2d   **coords;
314   PetscScalar  **u;
315   PetscInt       xs, ys, xm, ym, i, j;
316   PetscErrorCode ierr;
317 
318   PetscFunctionBeginUser;
319   ierr = DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);CHKERRQ(ierr);
320   ierr = DMGetCoordinateDM(da, &coordDA);CHKERRQ(ierr);
321   ierr = DMGetCoordinates(da, &coordinates);CHKERRQ(ierr);
322   ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
323   ierr = DMDAVecGetArray(da, U, &u);CHKERRQ(ierr);
324   for (j = ys; j < ys+ym; ++j) {
325     for (i = xs; i < xs+xm; ++i) {
326       user->mms_solution(user,&coords[j][i],&u[j][i]);
327     }
328   }
329   ierr = DMDAVecRestoreArray(da, U, &u);CHKERRQ(ierr);
330   ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
331   PetscFunctionReturn(0);
332 }
333 
334 PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
335 {
336   u[0] = 0.;
337   return 0;
338 }
339 
340 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
341 
342      f(x,y) = -u_xx - u_yy - lambda exp(u)
343 
344   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
345  */
346 PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
347 {
348   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
349   u[0] = x*(1 - x)*y*(1 - y);
350   PetscLogFlops(5);
351   return 0;
352 }
353 PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
354 {
355   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
356   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
357   return 0;
358 }
359 
360 PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
361 {
362   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
363   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
364   PetscLogFlops(5);
365   return 0;
366 }
367 PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
368 {
369   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
370   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
371   return 0;
372 }
373 
374 PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
375 {
376   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
377   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
378   PetscLogFlops(5);
379   return 0;
380 }
/*
   MMSForcing3 - Forcing paired with MMSSolution3:
   f = -u_xx - u_yy - lambda*exp(u) for u = sin(m*pi*x*(1-y))*sin(n*pi*y*(1-x)).
   The closed-form expression below was presumably derived symbolically; do not
   reassociate or simplify terms, as that would perturb the computed values.
*/
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
391 
392 PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
393 {
394   const PetscReal Lx = 1.,Ly = 1.;
395   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
396   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
397   PetscLogFlops(9);
398   return 0;
399 }
/*
   MMSForcing4 - Forcing paired with MMSSolution4:
   f = -u_xx - u_yy - lambda*exp(u) for u = (x^4 - Lx^2 x^2)(y^4 - Ly^2 y^2).
   Keep the term grouping as written; it matches the symbolic derivation.
*/
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
409 
410 /* ------------------------------------------------------------------- */
411 /*
412    FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch
413 
414  */
/*
   FormFunctionLocal - Evaluates the nonlinear residual F(x) on the local
   process patch using the standard 5-point stencil.

   Input Parameters:
   info - DMDA patch description (owned range, global sizes)
   x    - ghosted array of current iterate values, indexed x[j][i]
   user - application context (lambda and MMS callbacks)

   Output Parameter:
   f    - residual array for the owned patch

   Note: user->mms_solution is called unconditionally on boundary rows, so it
   must be non-NULL (main() installs ZeroBCSolution by default).
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;    /* scaling of the y-direction second difference */
  hydhx  = hy/hx;    /* scaling of the x-direction second difference */
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary residual: penalize deviation from the (MMS) boundary value,
           scaled by the diagonal stencil weight so the Jacobian stays well-scaled. */
        c.x = i*hx; c.y = j*hy;
        ierr = user->mms_solution(user,&c,&mms_solution);CHKERRQ(ierr);
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; ierr = user->mms_solution(user,&c,&uw);CHKERRQ(ierr);}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; ierr = user->mms_solution(user,&c,&ue);CHKERRQ(ierr);}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; ierr = user->mms_solution(user,&c,&un);CHKERRQ(ierr);}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; ierr = user->mms_solution(user,&c,&us);CHKERRQ(ierr);}

        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        /* mms_forcing may be NULL (plain Bratu run); then no forcing term is added */
        if (user->mms_forcing) {ierr = user->mms_forcing(user,&c,&mms_forcing);CHKERRQ(ierr);}
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  ierr = PetscLogFlops(11.0*info->ym*info->xm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
463 
/*
   FormObjectiveLocal - Evaluates the objective functional associated with the
   residual on the local patch and sums it across all processes (the result in
   *obj is the global value; the MPI_Allreduce makes this routine collective).
*/
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscErrorCode ierr;
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  ierr = PetscObjectGetComm((PetscObject)info->da,&comm);CHKERRQ(ierr);
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* boundary contribution mirrors the penalty term used in FormFunctionLocal */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* zero Dirichlet values at neighbors lying on the boundary */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  ierr = PetscLogFlops(12.0*info->ym*info->xm);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm);CHKERRMPI(ierr);
  PetscFunctionReturn(0);
}
514 
515 /*
516    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
517 */
518 PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
519 {
520   PetscErrorCode ierr;
521   PetscInt       i,j,k;
522   MatStencil     col[5],row;
523   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
524   DM             coordDA;
525   Vec            coordinates;
526   DMDACoor2d   **coords;
527 
528   PetscFunctionBeginUser;
529   lambda = user->param;
530   /* Extract coordinates */
531   ierr = DMGetCoordinateDM(info->da, &coordDA);CHKERRQ(ierr);
532   ierr = DMGetCoordinates(info->da, &coordinates);CHKERRQ(ierr);
533   ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
534   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
535   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
536   ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
537   hxdhy  = hx/hy;
538   hydhx  = hy/hx;
539   sc     = hx*hy*lambda;
540 
541   /*
542      Compute entries for the locally owned part of the Jacobian.
543       - Currently, all PETSc parallel matrix formats are partitioned by
544         contiguous chunks of rows across the processors.
545       - Each processor needs to insert only elements that it owns
546         locally (but any non-local elements will be sent to the
547         appropriate processor during matrix assembly).
548       - Here, we set all entries for a particular row at once.
549       - We can set matrix entries either using either
550         MatSetValuesLocal() or MatSetValues(), as discussed above.
551   */
552   for (j=info->ys; j<info->ys+info->ym; j++) {
553     for (i=info->xs; i<info->xs+info->xm; i++) {
554       row.j = j; row.i = i;
555       /* boundary points */
556       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
557         v[0] =  2.0*(hydhx + hxdhy);
558         ierr = MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES);CHKERRQ(ierr);
559       } else {
560         k = 0;
561         /* interior grid points */
562         if (j-1 != 0) {
563           v[k]     = -hxdhy;
564           col[k].j = j - 1; col[k].i = i;
565           k++;
566         }
567         if (i-1 != 0) {
568           v[k]     = -hydhx;
569           col[k].j = j;     col[k].i = i-1;
570           k++;
571         }
572 
573         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
574 
575         if (i+1 != info->mx-1) {
576           v[k]     = -hydhx;
577           col[k].j = j;     col[k].i = i+1;
578           k++;
579         }
580         if (j+1 != info->mx-1) {
581           v[k]     = -hxdhy;
582           col[k].j = j + 1; col[k].i = i;
583           k++;
584         }
585         ierr = MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES);CHKERRQ(ierr);
586       }
587     }
588   }
589 
590   /*
591      Assemble matrix, using the 2-step process:
592        MatAssemblyBegin(), MatAssemblyEnd().
593   */
594   ierr = MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
595   ierr = MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
596   /*
597      Tell the matrix we will never add a new nonzero location to the
598      matrix. If we do, it will generate an error.
599   */
600   ierr = MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
601   PetscFunctionReturn(0);
602 }
603 
/*
   FormFunctionMatlab - Evaluates the nonlinear residual by delegating to the
   MATLAB routine ex5m() through the PETSc MATLAB engine.  Compiled to a no-op
   stub when PETSc was built without MATLAB support (the body is then never
   called; see the -matlab_function guard in main()).
*/
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscErrorCode ierr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  ierr = SNESGetDM(snes,&da);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  /* Names identify the vectors inside the MATLAB workspace */
  ierr = PetscObjectSetName((PetscObject)localX,"localX");CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)localF,"localF");CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  ierr = PetscObjectGetComm((PetscObject)snes,&comm);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);CHKERRQ(ierr);
  ierr = PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);CHKERRQ(ierr);
  ierr = PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);CHKERRQ(ierr);

  /*
     Insert values into global vector
  */
  ierr = DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
652 
653 /* ------------------------------------------------------------------- */
654 /*
655       Applies some sweeps on nonlinear Gauss-Seidel on each process
656 
657  */
658 PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
659 {
660   PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
661   PetscErrorCode ierr;
662   PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
663   PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
664   PetscReal      atol,rtol,stol;
665   DM             da;
666   AppCtx         *user;
667   Vec            localX,localB;
668 
669   PetscFunctionBeginUser;
670   tot_its = 0;
671   ierr    = SNESNGSGetSweeps(snes,&sweeps);CHKERRQ(ierr);
672   ierr    = SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its);CHKERRQ(ierr);
673   ierr    = SNESGetDM(snes,&da);CHKERRQ(ierr);
674   ierr    = DMGetApplicationContext(da,&user);CHKERRQ(ierr);
675 
676   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
677 
678   lambda = user->param;
679   hx     = 1.0/(PetscReal)(Mx-1);
680   hy     = 1.0/(PetscReal)(My-1);
681   sc     = hx*hy*lambda;
682   hxdhy  = hx/hy;
683   hydhx  = hy/hx;
684 
685   ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
686   if (B) {
687     ierr = DMGetLocalVector(da,&localB);CHKERRQ(ierr);
688   }
689   for (l=0; l<sweeps; l++) {
690 
691     ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
692     ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
693     if (B) {
694       ierr = DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB);CHKERRQ(ierr);
695       ierr = DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB);CHKERRQ(ierr);
696     }
697     /*
698      Get a pointer to vector data.
699      - For default PETSc vectors, VecGetArray() returns a pointer to
700      the data array.  Otherwise, the routine is implementation dependent.
701      - You MUST call VecRestoreArray() when you no longer need access to
702      the array.
703      */
704     ierr = DMDAVecGetArray(da,localX,&x);CHKERRQ(ierr);
705     if (B) {ierr = DMDAVecGetArray(da,localB,&b);CHKERRQ(ierr);}
706     /*
707      Get local grid boundaries (for 2-dimensional DMDA):
708      xs, ys   - starting grid indices (no ghost points)
709      xm, ym   - widths of local grid (no ghost points)
710      */
711     ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
712 
713     for (j=ys; j<ys+ym; j++) {
714       for (i=xs; i<xs+xm; i++) {
715         if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
716           /* boundary conditions are all zero Dirichlet */
717           x[j][i] = 0.0;
718         } else {
719           if (B) bij = b[j][i];
720           else   bij = 0.;
721 
722           u  = x[j][i];
723           un = x[j-1][i];
724           us = x[j+1][i];
725           ue = x[j][i-1];
726           uw = x[j][i+1];
727 
728           for (k=0; k<its; k++) {
729             eu  = PetscExpScalar(u);
730             uxx = (2.0*u - ue - uw)*hydhx;
731             uyy = (2.0*u - un - us)*hxdhy;
732             F   = uxx + uyy - sc*eu - bij;
733             if (k == 0) F0 = F;
734             J  = 2.0*(hydhx + hxdhy) - sc*eu;
735             y  = F/J;
736             u -= y;
737             tot_its++;
738 
739             if (atol > PetscAbsReal(PetscRealPart(F)) ||
740                 rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
741                 stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
742               break;
743             }
744           }
745           x[j][i] = u;
746         }
747       }
748     }
749     /*
750      Restore vector
751      */
752     ierr = DMDAVecRestoreArray(da,localX,&x);CHKERRQ(ierr);
753     ierr = DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
754     ierr = DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);CHKERRQ(ierr);
755   }
756   ierr = PetscLogFlops(tot_its*(21.0));CHKERRQ(ierr);
757   ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
758   if (B) {
759     ierr = DMDAVecRestoreArray(da,localB,&b);CHKERRQ(ierr);
760     ierr = DMRestoreLocalVector(da,&localB);CHKERRQ(ierr);
761   }
762   PetscFunctionReturn(0);
763 }
764 
765 /*TEST
766 
767    test:
768      suffix: asm_0
769      requires: !single
770      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
771 
772    test:
773      suffix: msm_0
774      requires: !single
775      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
776 
777    test:
778      suffix: asm_1
779      requires: !single
780      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
781 
782    test:
783      suffix: msm_1
784      requires: !single
785      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
786 
787    test:
788      suffix: asm_2
789      requires: !single
790      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
791 
792    test:
793      suffix: msm_2
794      requires: !single
795      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
796 
797    test:
798      suffix: asm_3
799      requires: !single
800      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
801 
802    test:
803      suffix: msm_3
804      requires: !single
805      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
806 
807    test:
808      suffix: asm_4
809      requires: !single
810      nsize: 2
811      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
812 
813    test:
814      suffix: msm_4
815      requires: !single
816      nsize: 2
817      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
818 
819    test:
820      suffix: asm_5
821      requires: !single
822      nsize: 2
823      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
824 
825    test:
826      suffix: msm_5
827      requires: !single
828      nsize: 2
829      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
830 
831    test:
832      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
833      requires: !single
834 
835    test:
836      suffix: 2
837      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
838 
839    test:
840      suffix: 3
841      nsize: 2
842      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
843      filter: grep -v "otal number of function evaluations"
844 
845    test:
846      suffix: 4
847      nsize: 2
848      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
849 
850    test:
851      suffix: 5_anderson
852      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
853 
854    test:
855      suffix: 5_aspin
856      nsize: 4
857      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
858 
859    test:
860      suffix: 5_broyden
861      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
862 
863    test:
864      suffix: 5_fas
865      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
866      requires: !single
867 
868    test:
869      suffix: 5_fas_additive
870      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
871 
872    test:
873      suffix: 5_fas_monitor
874      args: -da_refine 1 -snes_type fas -snes_fas_monitor
875      requires: !single
876 
877    test:
878      suffix: 5_ls
879      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
880 
881    test:
882      suffix: 5_ls_sell_sor
883      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
884      output_file: output/ex5_5_ls.out
885 
886    test:
887      suffix: 5_nasm
888      nsize: 4
889      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
890 
891    test:
892      suffix: 5_ncg
893      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
894 
895    test:
896      suffix: 5_newton_asm_dmda
897      nsize: 4
898      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
899      requires: !single
900 
901    test:
902      suffix: 5_newton_gasm_dmda
903      nsize: 4
904      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
905      requires: !single
906 
907    test:
908      suffix: 5_ngmres
909      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
910 
911    test:
912      suffix: 5_ngmres_fas
913      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
914 
915    test:
916      suffix: 5_ngmres_ngs
917      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
918 
919    test:
920      suffix: 5_ngmres_nrichardson
921      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
922      output_file: output/ex5_5_ngmres_richardson.out
923 
924    test:
925      suffix: 5_nrichardson
926      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
927 
928    test:
929      suffix: 5_qn
930      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
931 
932    test:
933      suffix: 6
934      nsize: 4
935      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
936 
937    test:
938      requires: complex !single
939      suffix: complex
940      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
941 
942 TEST*/
943