xref: /petsc/src/snes/tutorials/ex5.c (revision c3e4dd79e17d3f0a51a524340fae4661630fd09e)
/* Help text printed by the -help option: describes the Bratu (SFI) problem
   and the example-specific command line options. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /* ------------------------------------------------------------------------
11 
12     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
13     the partial differential equation
14 
15             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
16 
17     with boundary conditions
18 
19              u = 0  for  x = 0, x = 1, y = 0, y = 1.
20 
21     A finite difference approximation with the usual 5-point stencil
22     is used to discretize the boundary value problem to obtain a nonlinear
23     system of equations.
24 
25       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
26       as SNESSetDM() is provided. Example usage
27 
28       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
29              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
30 
31       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
32          multigrid levels, it will be determined automatically based on the number of refinements done)
33 
34       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
35              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
36 
37   ------------------------------------------------------------------------- */
38 
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that these
   headers automatically include the lower-level PETSc headers they depend on.
*/
43 #include <petscdm.h>
44 #include <petscdmda.h>
45 #include <petscsnes.h>
46 #include <petscmatlab.h>
47 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
48 
49 /*
50    User-defined application context - contains data needed by the
51    application-provided call-back routines, FormJacobianLocal() and
52    FormFunctionLocal().
53 */
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* Bratu nonlinearity parameter lambda; main() enforces 0 <= param <= 6.81 */
  PetscInt  m,n;            /* MMS3 frequencies (2^m_par, 2^n_par); only set in main() when -mms 3 */
  /* Callback evaluating the exact/boundary solution u at one coordinate point. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Callback evaluating the MMS forcing f at one coordinate point; NULL for -mms 0. */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
61 
62 /* ------------------------------------------------------------------- */
63 /*
64    FormInitialGuess - Forms initial approximation.
65 
66    Input Parameters:
67    da - The DM
68    user - user-defined application context
69 
70    Output Parameter:
71    X - vector
72  */
73 static PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
74 {
75   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
76   PetscReal      lambda,temp1,temp,hx,hy;
77   PetscScalar    **x;
78 
79   PetscFunctionBeginUser;
80   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
81 
82   lambda = user->param;
83   hx     = 1.0/(PetscReal)(Mx-1);
84   hy     = 1.0/(PetscReal)(My-1);
85   temp1  = lambda/(lambda + 1.0);
86 
87   /*
88      Get a pointer to vector data.
89        - For default PETSc vectors, VecGetArray() returns a pointer to
90          the data array.  Otherwise, the routine is implementation dependent.
91        - You MUST call VecRestoreArray() when you no longer need access to
92          the array.
93   */
94   PetscCall(DMDAVecGetArray(da,X,&x));
95 
96   /*
97      Get local grid boundaries (for 2-dimensional DMDA):
98        xs, ys   - starting grid indices (no ghost points)
99        xm, ym   - widths of local grid (no ghost points)
100 
101   */
102   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
103 
104   /*
105      Compute initial guess over the locally owned part of the grid
106   */
107   for (j=ys; j<ys+ym; j++) {
108     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
109     for (i=xs; i<xs+xm; i++) {
110       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
111         /* boundary conditions are all zero Dirichlet */
112         x[j][i] = 0.0;
113       } else {
114         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
115       }
116     }
117   }
118 
119   /*
120      Restore vector
121   */
122   PetscCall(DMDAVecRestoreArray(da,X,&x));
123   PetscFunctionReturn(0);
124 }
125 
126 /*
127   FormExactSolution - Forms MMS solution
128 
129   Input Parameters:
130   da - The DM
131   user - user-defined application context
132 
133   Output Parameter:
134   X - vector
135  */
136 static PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
137 {
138   DM             coordDA;
139   Vec            coordinates;
140   DMDACoor2d   **coords;
141   PetscScalar  **u;
142   PetscInt       xs, ys, xm, ym, i, j;
143 
144   PetscFunctionBeginUser;
145   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
146   PetscCall(DMGetCoordinateDM(da, &coordDA));
147   PetscCall(DMGetCoordinates(da, &coordinates));
148   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
149   PetscCall(DMDAVecGetArray(da, U, &u));
150   for (j = ys; j < ys+ym; ++j) {
151     for (i = xs; i < xs+xm; ++i) {
152       user->mms_solution(user,&coords[j][i],&u[j][i]);
153     }
154   }
155   PetscCall(DMDAVecRestoreArray(da, U, &u));
156   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
157   PetscFunctionReturn(0);
158 }
159 
160 static PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
161 {
162   u[0] = 0.;
163   return 0;
164 }
165 
166 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
167 
168      f(x,y) = -u_xx - u_yy - lambda exp(u)
169 
170   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
171  */
172 static PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
173 {
174   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
175   u[0] = x*(1 - x)*y*(1 - y);
176   PetscLogFlops(5);
177   return 0;
178 }
179 static PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
180 {
181   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
182   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
183   return 0;
184 }
185 
186 static PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
187 {
188   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
189   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
190   PetscLogFlops(5);
191   return 0;
192 }
193 static PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
194 {
195   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
196   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
197   return 0;
198 }
199 
200 static PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
201 {
202   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
203   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
204   PetscLogFlops(5);
205   return 0;
206 }
/* MMS3 forcing: f = -u_xx - u_yy - lambda exp(u) for the MMS3 solution,
   expanded symbolically (note (x-1) and (1-x) sign flips cancel inside the
   squared/product terms, so this matches MMSSolution3's formula). */
static PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  /* -lambda exp(u) term, then the expanded Laplacian: the mixed cos*cos part
     comes from the cross derivatives of the two sine arguments, and the
     sin*sin part carries the squared-gradient coefficients. */
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
217 
218 static PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
219 {
220   const PetscReal Lx = 1.,Ly = 1.;
221   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
222   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
223   PetscLogFlops(9);
224   return 0;
225 }
/* MMS4 forcing: f = -u_xx - u_yy - lambda exp(u) for the MMS4 quartic solution. */
static PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  /* First two terms are -u_xx and -u_yy of the product quartic; the final
     term re-evaluates u inline for the exp(u) nonlinearity. */
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
235 
/* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch

   Uses the standard 5-point finite-difference Laplacian.  Boundary rows are
   replaced by a scaled Dirichlet condition against the MMS solution, and
   interior rows neighboring the boundary substitute the MMS value for the
   neighbor so the resulting Jacobian is symmetric.
 */
static PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);   /* uniform grid spacing in x */
  hy     = 1.0/(PetscReal)(info->my-1);   /* uniform grid spacing in y */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary row: residual is the (scaled) mismatch with the MMS value. */
        c.x = i*hx; c.y = j*hy;
        PetscCall(user->mms_solution(user,&c,&mms_solution));
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&uw));}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&ue));}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; PetscCall(user->mms_solution(user,&c,&un));}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; PetscCall(user->mms_solution(user,&c,&us));}

        uxx     = (2.0*u - uw - ue)*hydhx;   /* -u_xx scaled by hx*hy */
        uyy     = (2.0*u - un - us)*hxdhy;   /* -u_yy scaled by hx*hy */
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        if (user->mms_forcing) PetscCall(user->mms_forcing(user,&c,&mms_forcing));
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscCall(PetscLogFlops(11.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}
288 
/* FormObjectiveLocal - Evaluates the objective functional corresponding to the
   Bratu residual on the local process patch, then sums across all ranks.
   Note: this variant uses homogeneous zero boundary values (not the MMS
   solution), so it pairs with the plain Bratu problem. */
static PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;   /* lobj accumulates the local contribution */
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  PetscCall(PetscObjectGetComm((PetscObject)info->da,&comm));
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Boundary contribution: quadratic penalty on the boundary values. */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Zero out neighbors lying on the boundary (zero Dirichlet data). */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  PetscCall(PetscLogFlops(12.0*info->ym*info->xm));
  /* NOTE(review): PETSc code typically uses MPIU_Allreduce here for
     error-checked reductions -- confirm against the project convention. */
  PetscCallMPI(MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm));
  PetscFunctionReturn(0);
}
338 
339 /*
340    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
341 */
342 static PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
343 {
344   PetscInt       i,j,k;
345   MatStencil     col[5],row;
346   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
347   DM             coordDA;
348   Vec            coordinates;
349   DMDACoor2d   **coords;
350 
351   PetscFunctionBeginUser;
352   lambda = user->param;
353   /* Extract coordinates */
354   PetscCall(DMGetCoordinateDM(info->da, &coordDA));
355   PetscCall(DMGetCoordinates(info->da, &coordinates));
356   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
357   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
358   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
359   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
360   hxdhy  = hx/hy;
361   hydhx  = hy/hx;
362   sc     = hx*hy*lambda;
363 
364   /*
365      Compute entries for the locally owned part of the Jacobian.
366       - Currently, all PETSc parallel matrix formats are partitioned by
367         contiguous chunks of rows across the processors.
368       - Each processor needs to insert only elements that it owns
369         locally (but any non-local elements will be sent to the
370         appropriate processor during matrix assembly).
371       - Here, we set all entries for a particular row at once.
372       - We can set matrix entries either using either
373         MatSetValuesLocal() or MatSetValues(), as discussed above.
374   */
375   for (j=info->ys; j<info->ys+info->ym; j++) {
376     for (i=info->xs; i<info->xs+info->xm; i++) {
377       row.j = j; row.i = i;
378       /* boundary points */
379       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
380         v[0] =  2.0*(hydhx + hxdhy);
381         PetscCall(MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES));
382       } else {
383         k = 0;
384         /* interior grid points */
385         if (j-1 != 0) {
386           v[k]     = -hxdhy;
387           col[k].j = j - 1; col[k].i = i;
388           k++;
389         }
390         if (i-1 != 0) {
391           v[k]     = -hydhx;
392           col[k].j = j;     col[k].i = i-1;
393           k++;
394         }
395 
396         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
397 
398         if (i+1 != info->mx-1) {
399           v[k]     = -hydhx;
400           col[k].j = j;     col[k].i = i+1;
401           k++;
402         }
403         if (j+1 != info->mx-1) {
404           v[k]     = -hxdhy;
405           col[k].j = j + 1; col[k].i = i;
406           k++;
407         }
408         PetscCall(MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES));
409       }
410     }
411   }
412 
413   /*
414      Assemble matrix, using the 2-step process:
415        MatAssemblyBegin(), MatAssemblyEnd().
416   */
417   PetscCall(MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY));
418   PetscCall(MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY));
419   /*
420      Tell the matrix we will never add a new nonzero location to the
421      matrix. If we do, it will generate an error.
422   */
423   PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
424   PetscFunctionReturn(0);
425 }
426 
/* FormFunctionMatlab - Alternative residual evaluation that ships the local
   vector to a MATLAB engine, runs ex5m(), and pulls the result back.  Only
   compiled to do work when PETSc was configured with a MATLAB engine;
   otherwise the stub returns immediately (and is never registered). */
static PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMGetLocalVector(da,&localF));
  /* Names are how the MATLAB workspace refers to these vectors. */
  PetscCall(PetscObjectSetName((PetscObject)localX,"localX"));
  PetscCall(PetscObjectSetName((PetscObject)localF,"localF"));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscCall(PetscObjectGetComm((PetscObject)snes,&comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
  PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX));
  PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",(double)hx,(double)hy,(double)lambda));
  PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF));

  /*
     Insert values into global vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F));
  PetscCall(DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscCall(DMRestoreLocalVector(da,&localF));
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
474 
475 /* ------------------------------------------------------------------- */
476 /*
477       Applies some sweeps on nonlinear Gauss-Seidel on each process
478 
479  */
480 static PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
481 {
482   PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
483   PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
484   PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
485   PetscReal      atol,rtol,stol;
486   DM             da;
487   AppCtx         *user;
488   Vec            localX,localB;
489 
490   PetscFunctionBeginUser;
491   tot_its = 0;
492   PetscCall(SNESNGSGetSweeps(snes,&sweeps));
493   PetscCall(SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its));
494   PetscCall(SNESGetDM(snes,&da));
495   PetscCall(DMGetApplicationContext(da,&user));
496 
497   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
498 
499   lambda = user->param;
500   hx     = 1.0/(PetscReal)(Mx-1);
501   hy     = 1.0/(PetscReal)(My-1);
502   sc     = hx*hy*lambda;
503   hxdhy  = hx/hy;
504   hydhx  = hy/hx;
505 
506   PetscCall(DMGetLocalVector(da,&localX));
507   if (B) {
508     PetscCall(DMGetLocalVector(da,&localB));
509   }
510   for (l=0; l<sweeps; l++) {
511 
512     PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
513     PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
514     if (B) {
515       PetscCall(DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB));
516       PetscCall(DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB));
517     }
518     /*
519      Get a pointer to vector data.
520      - For default PETSc vectors, VecGetArray() returns a pointer to
521      the data array.  Otherwise, the routine is implementation dependent.
522      - You MUST call VecRestoreArray() when you no longer need access to
523      the array.
524      */
525     PetscCall(DMDAVecGetArray(da,localX,&x));
526     if (B) PetscCall(DMDAVecGetArray(da,localB,&b));
527     /*
528      Get local grid boundaries (for 2-dimensional DMDA):
529      xs, ys   - starting grid indices (no ghost points)
530      xm, ym   - widths of local grid (no ghost points)
531      */
532     PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
533 
534     for (j=ys; j<ys+ym; j++) {
535       for (i=xs; i<xs+xm; i++) {
536         if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
537           /* boundary conditions are all zero Dirichlet */
538           x[j][i] = 0.0;
539         } else {
540           if (B) bij = b[j][i];
541           else   bij = 0.;
542 
543           u  = x[j][i];
544           un = x[j-1][i];
545           us = x[j+1][i];
546           ue = x[j][i-1];
547           uw = x[j][i+1];
548 
549           for (k=0; k<its; k++) {
550             eu  = PetscExpScalar(u);
551             uxx = (2.0*u - ue - uw)*hydhx;
552             uyy = (2.0*u - un - us)*hxdhy;
553             F   = uxx + uyy - sc*eu - bij;
554             if (k == 0) F0 = F;
555             J  = 2.0*(hydhx + hxdhy) - sc*eu;
556             y  = F/J;
557             u -= y;
558             tot_its++;
559 
560             if (atol > PetscAbsReal(PetscRealPart(F)) ||
561                 rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
562                 stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
563               break;
564             }
565           }
566           x[j][i] = u;
567         }
568       }
569     }
570     /*
571      Restore vector
572      */
573     PetscCall(DMDAVecRestoreArray(da,localX,&x));
574     PetscCall(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
575     PetscCall(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
576   }
577   PetscCall(PetscLogFlops(tot_its*(21.0)));
578   PetscCall(DMRestoreLocalVector(da,&localX));
579   if (B) {
580     PetscCall(DMDAVecRestoreArray(da,localB,&b));
581     PetscCall(DMRestoreLocalVector(da,&localB));
582   }
583   PetscFunctionReturn(0);
584 }
585 
/* Driver: sets up the DMDA and SNES, selects an MMS variant, solves, and
   (when -mms was given explicitly) reports the discrete L2/inf error. */
int main(int argc,char **argv)
{
  SNES           snes;                         /* nonlinear solver */
  Vec            x;                            /* solution vector */
  AppCtx         user;                         /* user-defined work context */
  PetscInt       its;                          /* iterations for convergence */
  PetscReal      bratu_lambda_max = 6.81;      /* upper end of the solvable lambda range */
  PetscReal      bratu_lambda_min = 0.;
  PetscInt       MMS              = 1;         /* which manufactured solution (0-4) */
  PetscBool      flg              = PETSC_FALSE,setMMS;
  DM             da;
  Vec            r               = NULL;       /* residual vector, only for -matlab_function */
  KSP            ksp;
  PetscInt       lits,slits;                   /* linear iteration counts from KSP and SNES */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL));
  PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,&setMMS));
  /* NOTE(review): user.m/user.n are only initialized for MMS == 3; the other
     MMS callbacks never read them, so this appears intentional -- confirm. */
  if (MMS == 3) {
    PetscInt mPar = 2, nPar = 1;
    PetscCall(PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL));
    PetscCall(PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL));
    user.m = PetscPowInt(2,mPar);
    user.n = PetscPowInt(2,nPar);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));
  /* Keep cumulative counters so KSPGetTotalIterations() can be cross-checked below. */
  PetscCall(SNESSetCountersReset(snes,PETSC_FALSE));
  PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  PetscCall(DMSetApplicationContext(da,&user));
  PetscCall(SNESSetDM(snes,da));
  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  switch (MMS) {
  case 0: user.mms_solution = ZeroBCSolution; user.mms_forcing = NULL; break;
  case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
  case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
  case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
  case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
  default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %" PetscInt_FMT,MMS);
  }
  PetscCall(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL));
  if (!flg) {
    PetscCall(DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user));
  }

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL));
  if (flg) {
    PetscCall(DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user));
  }

  if (PetscDefined(HAVE_MATLAB_ENGINE)) {
    PetscBool matlab_function = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0));
    if (matlab_function) {
      PetscCall(VecDuplicate(x,&r));
      PetscCall(SNESSetFunction(snes,r,FormFunctionMatlab,&user));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  PetscCall(FormInitialGuess(da,&user,x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes,NULL,x));
  PetscCall(SNESGetIterationNumber(snes,&its));

  /* Sanity check: SNES's cumulative linear-iteration count must agree with
     the KSP's own total (counters were not reset above). */
  PetscCall(SNESGetLinearSolveIterations(snes,&slits));
  PetscCall(SNESGetKSP(snes,&ksp));
  PetscCall(KSPGetTotalIterations(ksp,&lits));
  PetscCheck(lits == slits,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %" PetscInt_FMT " does not match reported by KSP %" PetscInt_FMT,slits,lits);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     If using MMS, check the l_2 error
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (setMMS) {
    Vec       e;
    PetscReal errorl2, errorinf;
    PetscInt  N;

    PetscCall(VecDuplicate(x, &e));
    PetscCall(PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view"));
    PetscCall(FormExactSolution(da, &user, e));
    PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view"));
    PetscCall(VecAXPY(e, -1.0, x));   /* e now holds (exact - computed) */
    PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view"));
    PetscCall(VecNorm(e, NORM_2, &errorl2));
    PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
    PetscCall(VecGetSize(e, &N));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %" PetscInt_FMT " error L2 %g inf %g\n", N, (double)(errorl2/PetscSqrtReal((PetscReal)N)), (double) errorinf));
    PetscCall(VecDestroy(&e));
    PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
    PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N)));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
729 
730 /*TEST
731 
732    test:
733      suffix: asm_0
734      requires: !single
735      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
736 
737    test:
738      suffix: msm_0
739      requires: !single
740      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
741 
742    test:
743      suffix: asm_1
744      requires: !single
745      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
746 
747    test:
748      suffix: msm_1
749      requires: !single
750      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
751 
752    test:
753      suffix: asm_2
754      requires: !single
755      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
756 
757    test:
758      suffix: msm_2
759      requires: !single
760      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
761 
762    test:
763      suffix: asm_3
764      requires: !single
765      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
766 
767    test:
768      suffix: msm_3
769      requires: !single
770      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
771 
772    test:
773      suffix: asm_4
774      requires: !single
775      nsize: 2
776      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
777 
778    test:
779      suffix: msm_4
780      requires: !single
781      nsize: 2
782      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
783 
784    test:
785      suffix: asm_5
786      requires: !single
787      nsize: 2
788      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
789 
790    test:
791      suffix: msm_5
792      requires: !single
793      nsize: 2
794      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
795 
796    test:
797      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
798      requires: !single
799 
800    test:
801      suffix: 2
802      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
803 
804    test:
805      suffix: 3
806      nsize: 2
807      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
808      filter: grep -v "otal number of function evaluations"
809 
810    test:
811      suffix: 4
812      nsize: 2
813      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
814 
815    test:
816      suffix: 5_anderson
817      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
818 
819    test:
820      suffix: 5_aspin
821      nsize: 4
822      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
823 
824    test:
825      suffix: 5_broyden
826      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
827 
828    test:
829      suffix: 5_fas
830      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
831      requires: !single
832 
833    test:
834      suffix: 5_fas_additive
835      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
836 
837    test:
838      suffix: 5_fas_monitor
839      args: -da_refine 1 -snes_type fas -snes_fas_monitor
840      requires: !single
841 
842    test:
843      suffix: 5_ls
844      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
845 
846    test:
847      suffix: 5_ls_sell_sor
848      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
849      output_file: output/ex5_5_ls.out
850 
851    test:
852      suffix: 5_nasm
853      nsize: 4
854      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
855 
856    test:
857      suffix: 5_ncg
858      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
859 
860    test:
861      suffix: 5_newton_asm_dmda
862      nsize: 4
863      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
864      requires: !single
865 
866    test:
867      suffix: 5_newton_gasm_dmda
868      nsize: 4
869      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
870      requires: !single
871 
872    test:
873      suffix: 5_ngmres
874      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
875 
876    test:
877      suffix: 5_ngmres_fas
878      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
879 
880    test:
881      suffix: 5_ngmres_ngs
882      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
883 
884    test:
885      suffix: 5_ngmres_nrichardson
886      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
887      output_file: output/ex5_5_ngmres_richardson.out
888 
889    test:
890      suffix: 5_nrichardson
891      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
892 
893    test:
894      suffix: 5_qn
895      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
896 
897    test:
898      suffix: 6
899      nsize: 4
900      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
901 
902    test:
903      requires: complex !single
904      suffix: complex
905      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
906 
907 TEST*/
908