xref: /petsc/src/snes/tutorials/ex5.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
/* Usage text shown by -help; passed to PetscInitialize() in main(). */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /* ------------------------------------------------------------------------
11 
12     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
13     the partial differential equation
14 
15             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
16 
17     with boundary conditions
18 
19              u = 0  for  x = 0, x = 1, y = 0, y = 1.
20 
21     A finite difference approximation with the usual 5-point stencil
22     is used to discretize the boundary value problem to obtain a nonlinear
23     system of equations.
24 
25       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
26       as SNESSetDM() is provided. Example usage
27 
28       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
29              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
30 
31       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
32          multigrid levels, it will be determined automatically based on the number of refinements done)
33 
34       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
35              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
36 
37   ------------------------------------------------------------------------- */
38 
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.
*/
43 #include <petscdm.h>
44 #include <petscdmda.h>
45 #include <petscsnes.h>
46 #include <petscmatlab.h>
47 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
48 
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
/* Tag and typedef are split so the member function pointers below can name AppCtx. */
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param; /* test problem parameter */
  PetscInt  m, n;  /* MMS3 parameters */
  /* Manufactured-solution callbacks: evaluate u(x,y) (and, when non-NULL,
     the forcing f(x,y)) at coordinate c, writing a single scalar result. */
  PetscErrorCode (*mms_solution)(AppCtx *, const DMDACoor2d *, PetscScalar *);
  PetscErrorCode (*mms_forcing)(AppCtx *, const DMDACoor2d *, PetscScalar *);
};
61 
62 /* ------------------------------------------------------------------- */
63 /*
64    FormInitialGuess - Forms initial approximation.
65 
66    Input Parameters:
67    da - The DM
68    user - user-defined application context
69 
70    Output Parameter:
71    X - vector
72  */
73 static PetscErrorCode FormInitialGuess(DM da, AppCtx *user, Vec X) {
74   PetscInt      i, j, Mx, My, xs, ys, xm, ym;
75   PetscReal     lambda, temp1, temp, hx, hy;
76   PetscScalar **x;
77 
78   PetscFunctionBeginUser;
79   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
80 
81   lambda = user->param;
82   hx     = 1.0 / (PetscReal)(Mx - 1);
83   hy     = 1.0 / (PetscReal)(My - 1);
84   temp1  = lambda / (lambda + 1.0);
85 
86   /*
87      Get a pointer to vector data.
88        - For default PETSc vectors, VecGetArray() returns a pointer to
89          the data array.  Otherwise, the routine is implementation dependent.
90        - You MUST call VecRestoreArray() when you no longer need access to
91          the array.
92   */
93   PetscCall(DMDAVecGetArray(da, X, &x));
94 
95   /*
96      Get local grid boundaries (for 2-dimensional DMDA):
97        xs, ys   - starting grid indices (no ghost points)
98        xm, ym   - widths of local grid (no ghost points)
99 
100   */
101   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
102 
103   /*
104      Compute initial guess over the locally owned part of the grid
105   */
106   for (j = ys; j < ys + ym; j++) {
107     temp = (PetscReal)(PetscMin(j, My - j - 1)) * hy;
108     for (i = xs; i < xs + xm; i++) {
109       if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
110         /* boundary conditions are all zero Dirichlet */
111         x[j][i] = 0.0;
112       } else {
113         x[j][i] = temp1 * PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i, Mx - i - 1)) * hx, temp));
114       }
115     }
116   }
117 
118   /*
119      Restore vector
120   */
121   PetscCall(DMDAVecRestoreArray(da, X, &x));
122   PetscFunctionReturn(0);
123 }
124 
125 /*
126   FormExactSolution - Forms MMS solution
127 
128   Input Parameters:
129   da - The DM
130   user - user-defined application context
131 
132   Output Parameter:
133   X - vector
134  */
135 static PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U) {
136   DM            coordDA;
137   Vec           coordinates;
138   DMDACoor2d  **coords;
139   PetscScalar **u;
140   PetscInt      xs, ys, xm, ym, i, j;
141 
142   PetscFunctionBeginUser;
143   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
144   PetscCall(DMGetCoordinateDM(da, &coordDA));
145   PetscCall(DMGetCoordinates(da, &coordinates));
146   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
147   PetscCall(DMDAVecGetArray(da, U, &u));
148   for (j = ys; j < ys + ym; ++j) {
149     for (i = xs; i < xs + xm; ++i) user->mms_solution(user, &coords[j][i], &u[j][i]);
150   }
151   PetscCall(DMDAVecRestoreArray(da, U, &u));
152   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode ZeroBCSolution(AppCtx *user, const DMDACoor2d *c, PetscScalar *u) {
157   u[0] = 0.;
158   return 0;
159 }
160 
161 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
162 
163      f(x,y) = -u_xx - u_yy - lambda exp(u)
164 
165   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
166  */
167 static PetscErrorCode MMSSolution1(AppCtx *user, const DMDACoor2d *c, PetscScalar *u) {
168   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
169   u[0] = x * (1 - x) * y * (1 - y);
170   PetscLogFlops(5);
171   return 0;
172 }
173 static PetscErrorCode MMSForcing1(AppCtx *user, const DMDACoor2d *c, PetscScalar *f) {
174   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
175   f[0] = 2 * x * (1 - x) + 2 * y * (1 - y) - user->param * PetscExpReal(x * (1 - x) * y * (1 - y));
176   return 0;
177 }
178 
179 static PetscErrorCode MMSSolution2(AppCtx *user, const DMDACoor2d *c, PetscScalar *u) {
180   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
181   u[0] = PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y);
182   PetscLogFlops(5);
183   return 0;
184 }
185 static PetscErrorCode MMSForcing2(AppCtx *user, const DMDACoor2d *c, PetscScalar *f) {
186   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
187   f[0] = 2 * PetscSqr(PETSC_PI) * PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y) - user->param * PetscExpReal(PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y));
188   return 0;
189 }
190 
191 static PetscErrorCode MMSSolution3(AppCtx *user, const DMDACoor2d *c, PetscScalar *u) {
192   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
193   u[0] = PetscSinReal(user->m * PETSC_PI * x * (1 - y)) * PetscSinReal(user->n * PETSC_PI * y * (1 - x));
194   PetscLogFlops(5);
195   return 0;
196 }
/* Forcing consistent with MMSSolution3: f = -u_xx - u_yy - lambda*exp(u).
   The closed form below was obtained by symbolic differentiation of the
   MMS3 solution; do not rearrange terms (floating-point sensitive). */
static PetscErrorCode MMSForcing3(AppCtx *user, const DMDACoor2d *c, PetscScalar *f) {
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  /* m and n are PetscInt in AppCtx; widening to PetscReal here avoids
     integer overflow/truncation inside the products below. */
  PetscReal m = user->m, n = user->n, lambda = user->param;
  f[0] = (-(PetscExpReal(PetscSinReal(m * PETSC_PI * x * (1 - y)) * PetscSinReal(n * PETSC_PI * (1 - x) * y)) * lambda) + PetscSqr(PETSC_PI) * (-2 * m * n * ((-1 + x) * x + (-1 + y) * y) * PetscCosReal(m * PETSC_PI * x * (-1 + y)) * PetscCosReal(n * PETSC_PI * (-1 + x) * y) + (PetscSqr(m) * (PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n) * (PetscSqr(-1 + x) + PetscSqr(y))) * PetscSinReal(m * PETSC_PI * x * (-1 + y)) * PetscSinReal(n * PETSC_PI * (-1 + x) * y)));
  return 0;
}
203 
204 static PetscErrorCode MMSSolution4(AppCtx *user, const DMDACoor2d *c, PetscScalar *u) {
205   const PetscReal Lx = 1., Ly = 1.;
206   PetscReal       x = PetscRealPart(c->x), y = PetscRealPart(c->y);
207   u[0] = (PetscPowReal(x, 4) - PetscSqr(Lx) * PetscSqr(x)) * (PetscPowReal(y, 4) - PetscSqr(Ly) * PetscSqr(y));
208   PetscLogFlops(9);
209   return 0;
210 }
/* Forcing consistent with MMSSolution4: f = -u_xx - u_yy - lambda*exp(u).
   Closed form from symbolic differentiation of the quartic MMS4 solution;
   do not rearrange terms (floating-point sensitive). */
static PetscErrorCode MMSForcing4(AppCtx *user, const DMDACoor2d *c, PetscScalar *f) {
  const PetscReal Lx = 1., Ly = 1.;
  PetscReal       x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2 * PetscSqr(x) * (PetscSqr(x) - PetscSqr(Lx)) * (PetscSqr(Ly) - 6 * PetscSqr(y)) + 2 * PetscSqr(y) * (PetscSqr(Lx) - 6 * PetscSqr(x)) * (PetscSqr(y) - PetscSqr(Ly)) - user->param * PetscExpReal((PetscPowReal(x, 4) - PetscSqr(Lx) * PetscSqr(x)) * (PetscPowReal(y, 4) - PetscSqr(Ly) * PetscSqr(y))));
  return 0;
}
217 
/* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates the nonlinear residual F(x) on the local
   process patch using the standard 5-point stencil, scaled by hx*hy.

   Boundary rows produce 2*(hydhx+hxdhy)*(x - u_mms) so that their diagonal
   entries match the interior stencil, keeping the Jacobian symmetric.
 */
static PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, PetscScalar **x, PetscScalar **f, AppCtx *user) {
  PetscInt    i, j;
  PetscReal   lambda, hx, hy, hxdhy, hydhx;
  PetscScalar u, ue, uw, un, us, uxx, uyy, mms_solution, mms_forcing;
  DMDACoor2d  c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0 / (PetscReal)(info->mx - 1);
  hy     = 1.0 / (PetscReal)(info->my - 1);
  hxdhy  = hx / hy;
  hydhx  = hy / hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
        /* Boundary point: residual enforces x = mms_solution, scaled to
           match the interior diagonal 2*(hydhx+hxdhy). */
        c.x = i * hx;
        c.y = j * hy;
        PetscCall(user->mms_solution(user, &c, &mms_solution));
        f[j][i] = 2.0 * (hydhx + hxdhy) * (x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i - 1];
        ue = x[j][i + 1];
        un = x[j - 1][i];
        us = x[j + 1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i - 1 == 0) {
          c.x = (i - 1) * hx;
          c.y = j * hy;
          PetscCall(user->mms_solution(user, &c, &uw));
        }
        if (i + 1 == info->mx - 1) {
          c.x = (i + 1) * hx;
          c.y = j * hy;
          PetscCall(user->mms_solution(user, &c, &ue));
        }
        if (j - 1 == 0) {
          c.x = i * hx;
          c.y = (j - 1) * hy;
          PetscCall(user->mms_solution(user, &c, &un));
        }
        if (j + 1 == info->my - 1) {
          c.x = i * hx;
          c.y = (j + 1) * hy;
          PetscCall(user->mms_solution(user, &c, &us));
        }

        /* Scaled second differences; mms_forcing is zero when no forcing
           callback is installed (e.g. -mms 0). */
        uxx         = (2.0 * u - uw - ue) * hydhx;
        uyy         = (2.0 * u - un - us) * hxdhy;
        mms_forcing = 0;
        c.x         = i * hx;
        c.y         = j * hy;
        if (user->mms_forcing) PetscCall(user->mms_forcing(user, &c, &mms_forcing));
        f[j][i] = uxx + uyy - hx * hy * (lambda * PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscCall(PetscLogFlops(11.0 * info->ym * info->xm));
  PetscFunctionReturn(0);
}
287 
/* FormObjectiveLocal - Evaluates on the local patch an objective functional
   whose gradient corresponds to the residual in FormFunctionLocal; the local
   contributions are summed across processes with an Allreduce. */
static PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info, PetscScalar **x, PetscReal *obj, AppCtx *user) {
  PetscInt    i, j;
  PetscReal   lambda, hx, hy, hxdhy, hydhx, sc, lobj = 0;
  PetscScalar u, ue, uw, un, us, uxux, uyuy;
  MPI_Comm    comm;

  PetscFunctionBeginUser;
  *obj = 0;
  PetscCall(PetscObjectGetComm((PetscObject)info->da, &comm));
  lambda = user->param;
  hx     = 1.0 / (PetscReal)(info->mx - 1);
  hy     = 1.0 / (PetscReal)(info->my - 1);
  sc     = hx * hy * lambda;
  hxdhy  = hx / hy;
  hydhx  = hy / hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
        /* Boundary penalty term matching the boundary residual scaling. */
        lobj += PetscRealPart((hydhx + hxdhy) * x[j][i] * x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i - 1];
        ue = x[j][i + 1];
        un = x[j - 1][i];
        us = x[j + 1][i];

        /* Neighbors on the boundary are treated as zero Dirichlet values. */
        if (i - 1 == 0) uw = 0.;
        if (i + 1 == info->mx - 1) ue = 0.;
        if (j - 1 == 0) un = 0.;
        if (j + 1 == info->my - 1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u * (2. * u - ue - uw) * hydhx;
        uyuy = u * (2. * u - un - us) * hxdhy;

        lobj += PetscRealPart(0.5 * (uxux + uyuy) - sc * PetscExpScalar(u));
      }
    }
  }
  PetscCall(PetscLogFlops(12.0 * info->ym * info->xm));
  /* Sum the per-process contributions into the global objective value. */
  PetscCallMPI(MPI_Allreduce(&lobj, obj, 1, MPIU_REAL, MPIU_SUM, comm));
  PetscFunctionReturn(0);
}
336 
337 /*
338    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
339 */
340 static PetscErrorCode FormJacobianLocal(DMDALocalInfo *info, PetscScalar **x, Mat jac, Mat jacpre, AppCtx *user) {
341   PetscInt     i, j, k;
342   MatStencil   col[5], row;
343   PetscScalar  lambda, v[5], hx, hy, hxdhy, hydhx, sc;
344   DM           coordDA;
345   Vec          coordinates;
346   DMDACoor2d **coords;
347 
348   PetscFunctionBeginUser;
349   lambda = user->param;
350   /* Extract coordinates */
351   PetscCall(DMGetCoordinateDM(info->da, &coordDA));
352   PetscCall(DMGetCoordinates(info->da, &coordinates));
353   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
354   hx = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs + 1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
355   hy = info->ym > 1 ? PetscRealPart(coords[info->ys + 1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
356   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
357   hxdhy = hx / hy;
358   hydhx = hy / hx;
359   sc    = hx * hy * lambda;
360 
361   /*
362      Compute entries for the locally owned part of the Jacobian.
363       - Currently, all PETSc parallel matrix formats are partitioned by
364         contiguous chunks of rows across the processors.
365       - Each processor needs to insert only elements that it owns
366         locally (but any non-local elements will be sent to the
367         appropriate processor during matrix assembly).
368       - Here, we set all entries for a particular row at once.
369       - We can set matrix entries either using either
370         MatSetValuesLocal() or MatSetValues(), as discussed above.
371   */
372   for (j = info->ys; j < info->ys + info->ym; j++) {
373     for (i = info->xs; i < info->xs + info->xm; i++) {
374       row.j = j;
375       row.i = i;
376       /* boundary points */
377       if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
378         v[0] = 2.0 * (hydhx + hxdhy);
379         PetscCall(MatSetValuesStencil(jacpre, 1, &row, 1, &row, v, INSERT_VALUES));
380       } else {
381         k = 0;
382         /* interior grid points */
383         if (j - 1 != 0) {
384           v[k]     = -hxdhy;
385           col[k].j = j - 1;
386           col[k].i = i;
387           k++;
388         }
389         if (i - 1 != 0) {
390           v[k]     = -hydhx;
391           col[k].j = j;
392           col[k].i = i - 1;
393           k++;
394         }
395 
396         v[k]     = 2.0 * (hydhx + hxdhy) - sc * PetscExpScalar(x[j][i]);
397         col[k].j = row.j;
398         col[k].i = row.i;
399         k++;
400 
401         if (i + 1 != info->mx - 1) {
402           v[k]     = -hydhx;
403           col[k].j = j;
404           col[k].i = i + 1;
405           k++;
406         }
407         if (j + 1 != info->mx - 1) {
408           v[k]     = -hxdhy;
409           col[k].j = j + 1;
410           col[k].i = i;
411           k++;
412         }
413         PetscCall(MatSetValuesStencil(jacpre, 1, &row, k, col, v, INSERT_VALUES));
414       }
415     }
416   }
417 
418   /*
419      Assemble matrix, using the 2-step process:
420        MatAssemblyBegin(), MatAssemblyEnd().
421   */
422   PetscCall(MatAssemblyBegin(jacpre, MAT_FINAL_ASSEMBLY));
423   PetscCall(MatAssemblyEnd(jacpre, MAT_FINAL_ASSEMBLY));
424   /*
425      Tell the matrix we will never add a new nonzero location to the
426      matrix. If we do, it will generate an error.
427   */
428   PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
429   PetscFunctionReturn(0);
430 }
431 
/* FormFunctionMatlab - Residual evaluation delegated to the MATLAB script
   ex5m; compiled to a no-op stub when PETSc lacks the MATLAB engine.
   Registered via SNESSetFunction() in main() when -matlab_function is set. */
static PetscErrorCode FormFunctionMatlab(SNES snes, Vec X, Vec F, void *ptr) {
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx   *user = (AppCtx *)ptr;
  PetscInt  Mx, My;
  PetscReal lambda, hx, hy;
  Vec       localX, localF;
  MPI_Comm  comm;
  DM        da;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMGetLocalVector(da, &localF));
  /* Names are how the MATLAB workspace refers to these vectors. */
  PetscCall(PetscObjectSetName((PetscObject)localX, "localX"));
  PetscCall(PetscObjectSetName((PetscObject)localF, "localF"));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0 / (PetscReal)(Mx - 1);
  hy     = 1.0 / (PetscReal)(My - 1);

  PetscCall(PetscObjectGetComm((PetscObject)snes, &comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
  /* Ship localX to MATLAB, run the ex5m script there, pull back localF. */
  PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm), (PetscObject)localX));
  PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm), "localF=ex5m(localX,%18.16e,%18.16e,%18.16e)", (double)hx, (double)hy, (double)lambda));
  PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm), (PetscObject)localF));

  /*
     Insert values into global vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localF, INSERT_VALUES, F));
  PetscCall(DMLocalToGlobalEnd(da, localF, INSERT_VALUES, F));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscCall(DMRestoreLocalVector(da, &localF));
  PetscFunctionReturn(0);
#else
  return 0; /* Never called */
#endif
}
478 
/* ------------------------------------------------------------------- */
/*
   NonlinearGS - Applies `sweeps` passes of pointwise nonlinear Gauss-Seidel
   on each process; each grid point is relaxed with up to `its` scalar Newton
   iterations. Registered with SNESSetNGS() in main(). B, when non-NULL, is
   the right-hand side vector.
 */
static PetscErrorCode NonlinearGS(SNES snes, Vec X, Vec B, void *ctx) {
  PetscInt      i, j, k, Mx, My, xs, ys, xm, ym, its, tot_its, sweeps, l;
  PetscReal     lambda, hx, hy, hxdhy, hydhx, sc;
  PetscScalar **x, **b, bij, F, F0 = 0, J, u, un, us, ue, eu, uw, uxx, uyy, y;
  PetscReal     atol, rtol, stol;
  DM            da;
  AppCtx       *user;
  Vec           localX, localB;

  PetscFunctionBeginUser;
  tot_its = 0;
  PetscCall(SNESNGSGetSweeps(snes, &sweeps));
  PetscCall(SNESNGSGetTolerances(snes, &atol, &rtol, &stol, &its));
  PetscCall(SNESGetDM(snes, &da));
  PetscCall(DMGetApplicationContext(da, &user));

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0 / (PetscReal)(Mx - 1);
  hy     = 1.0 / (PetscReal)(My - 1);
  sc     = hx * hy * lambda;
  hxdhy  = hx / hy;
  hydhx  = hy / hx;

  PetscCall(DMGetLocalVector(da, &localX));
  if (B) PetscCall(DMGetLocalVector(da, &localB));
  for (l = 0; l < sweeps; l++) {
    /* Refresh ghost values at the start of each sweep. */
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
    if (B) {
      PetscCall(DMGlobalToLocalBegin(da, B, INSERT_VALUES, localB));
      PetscCall(DMGlobalToLocalEnd(da, B, INSERT_VALUES, localB));
    }
    /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
     the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
     the array.
     */
    /* NOTE(review): localB's array is re-gotten every sweep here but only
       restored once after the loop (below) -- confirm the Get/Restore
       pairing is acceptable for DMDAVecGetArray. */
    PetscCall(DMDAVecGetArray(da, localX, &x));
    if (B) PetscCall(DMDAVecGetArray(da, localB, &b));
    /*
     Get local grid boundaries (for 2-dimensional DMDA):
     xs, ys   - starting grid indices (no ghost points)
     xm, ym   - widths of local grid (no ghost points)
     */
    PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          if (B) bij = b[j][i];
          else bij = 0.;

          /* NOTE(review): ue is loaded from x[j][i-1] (west neighbor) and uw
             from x[j][i+1] (east) -- the names look swapped, but the stencil
             is symmetric in ue/uw so the result is unaffected; confirm. */
          u  = x[j][i];
          un = x[j - 1][i];
          us = x[j + 1][i];
          ue = x[j][i - 1];
          uw = x[j][i + 1];

          /* Pointwise scalar Newton: F(u) = uxx + uyy - sc*exp(u) - bij. */
          for (k = 0; k < its; k++) {
            eu  = PetscExpScalar(u);
            uxx = (2.0 * u - ue - uw) * hydhx;
            uyy = (2.0 * u - un - us) * hxdhy;
            F   = uxx + uyy - sc * eu - bij;
            if (k == 0) F0 = F;
            J = 2.0 * (hydhx + hxdhy) - sc * eu;
            y = F / J;
            u -= y;
            tot_its++;

            /* Stop on absolute, relative (vs. first residual), or step tolerance. */
            if (atol > PetscAbsReal(PetscRealPart(F)) || rtol * PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) || stol * PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) break;
          }
          x[j][i] = u;
        }
      }
    }
    /*
     Restore vector
     */
    PetscCall(DMDAVecRestoreArray(da, localX, &x));
    PetscCall(DMLocalToGlobalBegin(da, localX, INSERT_VALUES, X));
    PetscCall(DMLocalToGlobalEnd(da, localX, INSERT_VALUES, X));
  }
  /* ~21 flops per inner Newton iteration. */
  PetscCall(PetscLogFlops(tot_its * (21.0)));
  PetscCall(DMRestoreLocalVector(da, &localX));
  if (B) {
    PetscCall(DMDAVecRestoreArray(da, localB, &b));
    PetscCall(DMRestoreLocalVector(da, &localB));
  }
  PetscFunctionReturn(0);
}
581 
/* Driver: reads options, builds the DMDA and SNES, selects the MMS callbacks,
   solves the Bratu problem, and (when -mms was given) reports the L2 and
   infinity norms of the error against the manufactured solution. */
int main(int argc, char **argv) {
  SNES      snes; /* nonlinear solver */
  Vec       x;    /* solution vector */
  AppCtx    user; /* user-defined work context */
  PetscInt  its;  /* iterations for convergence */
  PetscReal bratu_lambda_max = 6.81;
  PetscReal bratu_lambda_min = 0.;
  PetscInt  MMS              = 1;
  PetscBool flg              = PETSC_FALSE, setMMS;
  DM        da;
  Vec       r = NULL;
  KSP       ksp;
  PetscInt  lits, slits;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-par", &user.param, NULL));
  /* 6.81 is (approximately) the critical lambda of the Bratu problem. */
  PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
  /* setMMS records whether -mms was given; the error report below is skipped otherwise. */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-mms", &MMS, &setMMS));
  if (MMS == 3) {
    PetscInt mPar = 2, nPar = 1;
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-m_par", &mPar, NULL));
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-n_par", &nPar, NULL));
    user.m = PetscPowInt(2, mPar);
    user.n = PetscPowInt(2, nPar);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
  /* Keep iteration counters cumulative so the KSP total check below is valid. */
  PetscCall(SNESSetCountersReset(snes, PETSC_FALSE));
  PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  PetscCall(DMSetApplicationContext(da, &user));
  PetscCall(SNESSetDM(snes, da));
  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  switch (MMS) {
  case 0:
    user.mms_solution = ZeroBCSolution;
    user.mms_forcing  = NULL;
    break;
  case 1:
    user.mms_solution = MMSSolution1;
    user.mms_forcing  = MMSForcing1;
    break;
  case 2:
    user.mms_solution = MMSSolution2;
    user.mms_forcing  = MMSForcing2;
    break;
  case 3:
    user.mms_solution = MMSSolution3;
    user.mms_forcing  = MMSForcing3;
    break;
  case 4:
    user.mms_solution = MMSSolution4;
    user.mms_forcing  = MMSForcing4;
    break;
  default: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Unknown MMS type %" PetscInt_FMT, MMS);
  }
  PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (DMDASNESFunction)FormFunctionLocal, &user));
  /* -fd: use finite-difference Jacobian instead of the analytic one. */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-fd", &flg, NULL));
  if (!flg) PetscCall(DMDASNESSetJacobianLocal(da, (DMDASNESJacobian)FormJacobianLocal, &user));

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-obj", &flg, NULL));
  if (flg) PetscCall(DMDASNESSetObjectiveLocal(da, (DMDASNESObjective)FormObjectiveLocal, &user));

  if (PetscDefined(HAVE_MATLAB_ENGINE)) {
    PetscBool matlab_function = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-matlab_function", &matlab_function, 0));
    if (matlab_function) {
      PetscCall(VecDuplicate(x, &r));
      PetscCall(SNESSetFunction(snes, r, FormFunctionMatlab, &user));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  PetscCall(FormInitialGuess(da, &user, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes, NULL, x));
  PetscCall(SNESGetIterationNumber(snes, &its));

  /* Sanity check: SNES' cumulative linear-iteration count must equal KSP's. */
  PetscCall(SNESGetLinearSolveIterations(snes, &slits));
  PetscCall(SNESGetKSP(snes, &ksp));
  PetscCall(KSPGetTotalIterations(ksp, &lits));
  PetscCheck(lits == slits, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Number of total linear iterations reported by SNES %" PetscInt_FMT " does not match reported by KSP %" PetscInt_FMT, slits, lits);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     If using MMS, check the l_2 error
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (setMMS) {
    Vec       e;
    PetscReal errorl2, errorinf;
    PetscInt  N;

    PetscCall(VecDuplicate(x, &e));
    PetscCall(PetscObjectViewFromOptions((PetscObject)x, NULL, "-sol_view"));
    PetscCall(FormExactSolution(da, &user, e));
    PetscCall(PetscObjectViewFromOptions((PetscObject)e, NULL, "-exact_view"));
    /* e <- e - x: e now holds the error vector. */
    PetscCall(VecAXPY(e, -1.0, x));
    PetscCall(PetscObjectViewFromOptions((PetscObject)e, NULL, "-error_view"));
    PetscCall(VecNorm(e, NORM_2, &errorl2));
    PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
    PetscCall(VecGetSize(e, &N));
    /* Divide by sqrt(N) to approximate the continuous L2 norm. */
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %" PetscInt_FMT " error L2 %g inf %g\n", N, (double)(errorl2 / PetscSqrtReal((PetscReal)N)), (double)errorinf));
    PetscCall(VecDestroy(&e));
    PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
    PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2 / PetscSqrtReal(N)));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
736 
737 /*TEST
738 
739    test:
740      suffix: asm_0
741      requires: !single
742      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
743 
744    test:
745      suffix: msm_0
746      requires: !single
747      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
748 
749    test:
750      suffix: asm_1
751      requires: !single
752      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
753 
754    test:
755      suffix: msm_1
756      requires: !single
757      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
758 
759    test:
760      suffix: asm_2
761      requires: !single
762      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
763 
764    test:
765      suffix: msm_2
766      requires: !single
767      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
768 
769    test:
770      suffix: asm_3
771      requires: !single
772      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
773 
774    test:
775      suffix: msm_3
776      requires: !single
777      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
778 
779    test:
780      suffix: asm_4
781      requires: !single
782      nsize: 2
783      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
784 
785    test:
786      suffix: msm_4
787      requires: !single
788      nsize: 2
789      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
790 
791    test:
792      suffix: asm_5
793      requires: !single
794      nsize: 2
795      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
796 
797    test:
798      suffix: msm_5
799      requires: !single
800      nsize: 2
801      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
802 
803    test:
804      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
805      requires: !single
806 
807    test:
808      suffix: 2
809      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
810 
811    test:
812      suffix: 3
813      nsize: 2
814      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
815      filter: grep -v "otal number of function evaluations"
816 
817    test:
818      suffix: 4
819      nsize: 2
820      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
821 
822    test:
823      suffix: 5_anderson
824      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
825 
826    test:
827      suffix: 5_aspin
828      nsize: 4
829      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
830 
831    test:
832      suffix: 5_broyden
833      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
834 
835    test:
836      suffix: 5_fas
837      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
838      requires: !single
839 
840    test:
841      suffix: 5_fas_additive
842      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
843 
844    test:
845      suffix: 5_fas_monitor
846      args: -da_refine 1 -snes_type fas -snes_fas_monitor
847      requires: !single
848 
849    test:
850      suffix: 5_ls
851      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
852 
853    test:
854      suffix: 5_ls_sell_sor
855      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
856      output_file: output/ex5_5_ls.out
857 
858    test:
859      suffix: 5_nasm
860      nsize: 4
861      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
862 
863    test:
864      suffix: 5_ncg
865      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
866 
867    test:
868      suffix: 5_newton_asm_dmda
869      nsize: 4
870      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
871      requires: !single
872 
873    test:
874      suffix: 5_newton_gasm_dmda
875      nsize: 4
876      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
877      requires: !single
878 
879    test:
880      suffix: 5_ngmres
881      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
882 
883    test:
884      suffix: 5_ngmres_fas
885      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
886 
887    test:
888      suffix: 5_ngmres_ngs
889      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
890 
891    test:
892      suffix: 5_ngmres_nrichardson
893      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
894      output_file: output/ex5_5_ngmres_richardson.out
895 
896    test:
897      suffix: 5_nrichardson
898      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
899 
900    test:
901      suffix: 5_qn
902      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
903 
904    test:
905      suffix: 6
906      nsize: 4
907      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
908 
909    test:
910      requires: complex !single
911      suffix: complex
912      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
913 
914 TEST*/
915