xref: /petsc/src/snes/tutorials/ex5.c (revision af2269012df9f78a2e64b5a712f72d93507752c9)
1 static char help[] = "Bratu nonlinear PDE in 2d.\n\
2 We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
3 domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
4 The command line options include:\n\
5   -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
6      problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
7   -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
8       that MMS3 will be evaluated with 2^m_par, 2^n_par";
9 
10 /* ------------------------------------------------------------------------
11 
12     Solid Fuel Ignition (SFI) problem.  This problem is modeled by
13     the partial differential equation
14 
15             -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
16 
17     with boundary conditions
18 
19              u = 0  for  x = 0, x = 1, y = 0, y = 1.
20 
21     A finite difference approximation with the usual 5-point stencil
22     is used to discretize the boundary value problem to obtain a nonlinear
23     system of equations.
24 
25       This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
26       as SNESSetDM() is provided. Example usage
27 
28       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
29              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
30 
31       or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
32          multigrid levels, it will be determined automatically based on the number of refinements done)
33 
34       ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
35              -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
36 
37   ------------------------------------------------------------------------- */
38 
39 /*
40    Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
41    Include "petscsnes.h" so that we can use SNES solvers.  Note that this
42 */
43 #include <petscdm.h>
44 #include <petscdmda.h>
45 #include <petscsnes.h>
46 #include <petscmatlab.h>
47 #include <petsc/private/snesimpl.h> /* For SNES_Solve event */
48 
49 /*
50    User-defined application context - contains data needed by the
51    application-provided call-back routines, FormJacobianLocal() and
52    FormFunctionLocal().
53 */
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* test problem parameter (Bratu lambda, 0 <= param <= 6.81) */
  PetscInt  m,n;            /* MMS3 parameters */
  /* Evaluates the manufactured (exact) solution u(x,y) at one coordinate. */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  /* Evaluates the MMS forcing term f(x,y) at one coordinate; NULL when the problem has no forcing (MMS 0). */
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
61 
62 /* ------------------------------------------------------------------- */
63 /*
64    FormInitialGuess - Forms initial approximation.
65 
66    Input Parameters:
67    da - The DM
68    user - user-defined application context
69 
70    Output Parameter:
71    X - vector
72  */
73 static PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
74 {
75   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
76   PetscReal      lambda,temp1,temp,hx,hy;
77   PetscScalar    **x;
78 
79   PetscFunctionBeginUser;
80   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
81 
82   lambda = user->param;
83   hx     = 1.0/(PetscReal)(Mx-1);
84   hy     = 1.0/(PetscReal)(My-1);
85   temp1  = lambda/(lambda + 1.0);
86 
87   /*
88      Get a pointer to vector data.
89        - For default PETSc vectors, VecGetArray() returns a pointer to
90          the data array.  Otherwise, the routine is implementation dependent.
91        - You MUST call VecRestoreArray() when you no longer need access to
92          the array.
93   */
94   PetscCall(DMDAVecGetArray(da,X,&x));
95 
96   /*
97      Get local grid boundaries (for 2-dimensional DMDA):
98        xs, ys   - starting grid indices (no ghost points)
99        xm, ym   - widths of local grid (no ghost points)
100 
101   */
102   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
103 
104   /*
105      Compute initial guess over the locally owned part of the grid
106   */
107   for (j=ys; j<ys+ym; j++) {
108     temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
109     for (i=xs; i<xs+xm; i++) {
110       if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
111         /* boundary conditions are all zero Dirichlet */
112         x[j][i] = 0.0;
113       } else {
114         x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
115       }
116     }
117   }
118 
119   /*
120      Restore vector
121   */
122   PetscCall(DMDAVecRestoreArray(da,X,&x));
123   PetscFunctionReturn(0);
124 }
125 
126 /*
127   FormExactSolution - Forms MMS solution
128 
129   Input Parameters:
130   da - The DM
131   user - user-defined application context
132 
133   Output Parameter:
134   X - vector
135  */
136 static PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
137 {
138   DM             coordDA;
139   Vec            coordinates;
140   DMDACoor2d   **coords;
141   PetscScalar  **u;
142   PetscInt       xs, ys, xm, ym, i, j;
143 
144   PetscFunctionBeginUser;
145   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
146   PetscCall(DMGetCoordinateDM(da, &coordDA));
147   PetscCall(DMGetCoordinates(da, &coordinates));
148   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
149   PetscCall(DMDAVecGetArray(da, U, &u));
150   for (j = ys; j < ys+ym; ++j) {
151     for (i = xs; i < xs+xm; ++i) {
152       user->mms_solution(user,&coords[j][i],&u[j][i]);
153     }
154   }
155   PetscCall(DMDAVecRestoreArray(da, U, &u));
156   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
157   PetscFunctionReturn(0);
158 }
159 
160 static PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
161 {
162   u[0] = 0.;
163   return 0;
164 }
165 
166 /* The functions below evaluate the MMS solution u(x,y) and associated forcing
167 
168      f(x,y) = -u_xx - u_yy - lambda exp(u)
169 
170   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
171  */
172 static PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
173 {
174   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
175   u[0] = x*(1 - x)*y*(1 - y);
176   PetscLogFlops(5);
177   return 0;
178 }
179 static PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
180 {
181   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
182   f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
183   return 0;
184 }
185 
186 static PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
187 {
188   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
189   u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
190   PetscLogFlops(5);
191   return 0;
192 }
193 static PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
194 {
195   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
196   f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
197   return 0;
198 }
199 
200 static PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
201 {
202   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
203   u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
204   PetscLogFlops(5);
205   return 0;
206 }
/* Forcing paired with MMSSolution3: the analytically expanded closed form of
   f = -u_xx - u_yy - lambda exp(u) for u = sin(m pi x (1-y)) sin(n pi y (1-x)).
   The expression is kept verbatim (likely machine-derived); do not re-associate
   the floating-point operations. */
static PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}
217 
218 static PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
219 {
220   const PetscReal Lx = 1.,Ly = 1.;
221   PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
222   u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
223   PetscLogFlops(9);
224   return 0;
225 }
/* Forcing paired with MMSSolution4: analytically expanded closed form of
   f = -u_xx - u_yy - lambda exp(u) for u = (x^4 - Lx^2 x^2)(y^4 - Ly^2 y^2).
   Kept verbatim; do not re-associate the floating-point operations. */
static PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
235 
236 /* ------------------------------------------------------------------- */
237 /*
238    FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch
239 
240  */
/*
   FormFunctionLocal - Evaluates the nonlinear residual F(x) on the local
   process patch using the 5-point finite-difference stencil.

   Input Parameters:
   info - DMDA patch description (grid sizes mx/my, owned corner xs/ys, extent xm/ym)
   x    - ghosted local array of the current iterate
   user - application context (lambda, MMS callbacks)

   Output Parameter:
   f - local residual array; on Dirichlet boundary rows the residual is
       (x - u_exact) scaled by the interior diagonal 2(hy/hx + hx/hy) so the
       Jacobian stays symmetric.
*/
static PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);   /* uniform spacing on [0,1] */
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;                         /* scaling of the y-direction stencil */
  hydhx  = hy/hx;                         /* scaling of the x-direction stencil */
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Dirichlet boundary row: residual enforces x = mms_solution */
        c.x = i*hx; c.y = j*hy;
        PetscCall(user->mms_solution(user,&c,&mms_solution));
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&uw));}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; PetscCall(user->mms_solution(user,&c,&ue));}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; PetscCall(user->mms_solution(user,&c,&un));}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; PetscCall(user->mms_solution(user,&c,&us));}

        /* Scaled second differences: -Laplacian discretized with the 5-point stencil */
        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        /* mms_forcing is NULL for MMS 0 (plain Bratu, no forcing term) */
        if (user->mms_forcing) PetscCall(user->mms_forcing(user,&c,&mms_forcing));
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscCall(PetscLogFlops(11.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}
288 
289 /* FormObjectiveLocal - Evaluates nonlinear function, F(x) on local process patch */
/* FormObjectiveLocal - Evaluates the objective functional associated with the
   Bratu problem on the local patch and sums it across all ranks.

   Computes lobj = sum over owned points of
     0.5*u*(-Laplacian u)*scaled - hx*hy*lambda*exp(u)
   (see the integral comment below), then MPI_Allreduce's into *obj.
   NOTE(review): zero Dirichlet values are substituted at near-boundary
   neighbors here, unlike FormFunctionLocal which uses mms_solution --
   so this objective corresponds to the homogeneous-BC problem. */
static PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  *obj   = 0;
  PetscCall(PetscObjectGetComm((PetscObject)info->da,&comm));
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;   /* cell-area-scaled Bratu parameter */
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* boundary contribution mirrors the scaled Dirichlet residual rows */
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* zero Dirichlet values at neighbors adjacent to the boundary */
        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  PetscCall(PetscLogFlops(12.0*info->ym*info->xm));
  PetscCallMPI(MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm));
  PetscFunctionReturn(0);
}
338 
339 /*
340    FormJacobianLocal - Evaluates Jacobian matrix on local process patch
341 */
342 static PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
343 {
344   PetscInt       i,j,k;
345   MatStencil     col[5],row;
346   PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
347   DM             coordDA;
348   Vec            coordinates;
349   DMDACoor2d   **coords;
350 
351   PetscFunctionBeginUser;
352   lambda = user->param;
353   /* Extract coordinates */
354   PetscCall(DMGetCoordinateDM(info->da, &coordDA));
355   PetscCall(DMGetCoordinates(info->da, &coordinates));
356   PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
357   hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
358   hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
359   PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
360   hxdhy  = hx/hy;
361   hydhx  = hy/hx;
362   sc     = hx*hy*lambda;
363 
364   /*
365      Compute entries for the locally owned part of the Jacobian.
366       - Currently, all PETSc parallel matrix formats are partitioned by
367         contiguous chunks of rows across the processors.
368       - Each processor needs to insert only elements that it owns
369         locally (but any non-local elements will be sent to the
370         appropriate processor during matrix assembly).
371       - Here, we set all entries for a particular row at once.
372       - We can set matrix entries either using either
373         MatSetValuesLocal() or MatSetValues(), as discussed above.
374   */
375   for (j=info->ys; j<info->ys+info->ym; j++) {
376     for (i=info->xs; i<info->xs+info->xm; i++) {
377       row.j = j; row.i = i;
378       /* boundary points */
379       if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
380         v[0] =  2.0*(hydhx + hxdhy);
381         PetscCall(MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES));
382       } else {
383         k = 0;
384         /* interior grid points */
385         if (j-1 != 0) {
386           v[k]     = -hxdhy;
387           col[k].j = j - 1; col[k].i = i;
388           k++;
389         }
390         if (i-1 != 0) {
391           v[k]     = -hydhx;
392           col[k].j = j;     col[k].i = i-1;
393           k++;
394         }
395 
396         v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
397 
398         if (i+1 != info->mx-1) {
399           v[k]     = -hydhx;
400           col[k].j = j;     col[k].i = i+1;
401           k++;
402         }
403         if (j+1 != info->mx-1) {
404           v[k]     = -hxdhy;
405           col[k].j = j + 1; col[k].i = i;
406           k++;
407         }
408         PetscCall(MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES));
409       }
410     }
411   }
412 
413   /*
414      Assemble matrix, using the 2-step process:
415        MatAssemblyBegin(), MatAssemblyEnd().
416   */
417   PetscCall(MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY));
418   PetscCall(MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY));
419   /*
420      Tell the matrix we will never add a new nonzero location to the
421      matrix. If we do, it will generate an error.
422   */
423   PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
424   PetscFunctionReturn(0);
425 }
426 
/* FormFunctionMatlab - Residual evaluation delegated to the MATLAB engine.
   Scatters X into a ghosted local vector, evaluates "ex5m" in MATLAB with
   (hx, hy, lambda), and gathers the result into F. Compiled to a no-op stub
   when PETSc was built without the MATLAB engine. */
static PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
#if PetscDefined(HAVE_MATLAB_ENGINE)
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMGetLocalVector(da,&localF));
  /* names are how the MATLAB workspace refers to these vectors */
  PetscCall(PetscObjectSetName((PetscObject)localX,"localX"));
  PetscCall(PetscObjectSetName((PetscObject)localF,"localF"));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscCall(PetscObjectGetComm((PetscObject)snes,&comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
  PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX));
  PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",(double)hx,(double)hy,(double)lambda));
  PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF));

  /*
     Insert values into global vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F));
  PetscCall(DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscCall(DMRestoreLocalVector(da,&localF));
  PetscFunctionReturn(0);
#else
    return 0;                     /* Never called */
#endif
}
474 
475 /* ------------------------------------------------------------------- */
476 /*
477       Applies some sweeps on nonlinear Gauss-Seidel on each process
478 
479  */
480 static PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
481 {
482   PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
483   PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
484   PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
485   PetscReal      atol,rtol,stol;
486   DM             da;
487   AppCtx         *user;
488   Vec            localX,localB;
489 
490   PetscFunctionBeginUser;
491   tot_its = 0;
492   PetscCall(SNESNGSGetSweeps(snes,&sweeps));
493   PetscCall(SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its));
494   PetscCall(SNESGetDM(snes,&da));
495   PetscCall(DMGetApplicationContext(da,&user));
496 
497   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
498 
499   lambda = user->param;
500   hx     = 1.0/(PetscReal)(Mx-1);
501   hy     = 1.0/(PetscReal)(My-1);
502   sc     = hx*hy*lambda;
503   hxdhy  = hx/hy;
504   hydhx  = hy/hx;
505 
506   PetscCall(DMGetLocalVector(da,&localX));
507   if (B) {
508     PetscCall(DMGetLocalVector(da,&localB));
509   }
510   for (l=0; l<sweeps; l++) {
511 
512     PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
513     PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
514     if (B) {
515       PetscCall(DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB));
516       PetscCall(DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB));
517     }
518     /*
519      Get a pointer to vector data.
520      - For default PETSc vectors, VecGetArray() returns a pointer to
521      the data array.  Otherwise, the routine is implementation dependent.
522      - You MUST call VecRestoreArray() when you no longer need access to
523      the array.
524      */
525     PetscCall(DMDAVecGetArray(da,localX,&x));
526     if (B) PetscCall(DMDAVecGetArray(da,localB,&b));
527     /*
528      Get local grid boundaries (for 2-dimensional DMDA):
529      xs, ys   - starting grid indices (no ghost points)
530      xm, ym   - widths of local grid (no ghost points)
531      */
532     PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
533 
534     for (j=ys; j<ys+ym; j++) {
535       for (i=xs; i<xs+xm; i++) {
536         if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
537           /* boundary conditions are all zero Dirichlet */
538           x[j][i] = 0.0;
539         } else {
540           if (B) bij = b[j][i];
541           else   bij = 0.;
542 
543           u  = x[j][i];
544           un = x[j-1][i];
545           us = x[j+1][i];
546           ue = x[j][i-1];
547           uw = x[j][i+1];
548 
549           for (k=0; k<its; k++) {
550             eu  = PetscExpScalar(u);
551             uxx = (2.0*u - ue - uw)*hydhx;
552             uyy = (2.0*u - un - us)*hxdhy;
553             F   = uxx + uyy - sc*eu - bij;
554             if (k == 0) F0 = F;
555             J  = 2.0*(hydhx + hxdhy) - sc*eu;
556             y  = F/J;
557             u -= y;
558             tot_its++;
559 
560             if (atol > PetscAbsReal(PetscRealPart(F)) ||
561                 rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
562                 stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
563               break;
564             }
565           }
566           x[j][i] = u;
567         }
568       }
569     }
570     /*
571      Restore vector
572      */
573     PetscCall(DMDAVecRestoreArray(da,localX,&x));
574     PetscCall(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
575     PetscCall(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
576   }
577   PetscCall(PetscLogFlops(tot_its*(21.0)));
578   PetscCall(DMRestoreLocalVector(da,&localX));
579   if (B) {
580     PetscCall(DMDAVecRestoreArray(da,localB,&b));
581     PetscCall(DMRestoreLocalVector(da,&localB));
582   }
583   PetscFunctionReturn(0);
584 }
585 
/* Driver: set up the DMDA grid and SNES solver, select an MMS variant, solve
   the Bratu problem, and (when -mms was given) report the discretization
   error against the manufactured solution. */
int main(int argc,char **argv)
{
  SNES           snes;                         /* nonlinear solver */
  Vec            x;                            /* solution vector */
  AppCtx         user;                         /* user-defined work context */
  PetscInt       its;                          /* iterations for convergence */
  PetscReal      bratu_lambda_max = 6.81;
  PetscReal      bratu_lambda_min = 0.;
  PetscInt       MMS              = 1;         /* which manufactured solution (0..4) */
  PetscBool      flg              = PETSC_FALSE,setMMS;
  DM             da;
  Vec            r               = NULL;       /* residual work vector (MATLAB path only) */
  KSP            ksp;
  PetscInt       lits,slits;                   /* linear iteration counts for cross-check */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL));
  PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,&setMMS));
  if (MMS == 3) {
    /* MMS 3 takes frequencies m = 2^m_par, n = 2^n_par */
    PetscInt mPar = 2, nPar = 1;
    PetscCall(PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL));
    PetscCall(PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL));
    user.m = PetscPowInt(2,mPar);
    user.n = PetscPowInt(2,nPar);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));
  /* keep iteration counters across solves so the KSP/SNES totals below agree */
  PetscCall(SNESSetCountersReset(snes,PETSC_FALSE));
  PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  PetscCall(DMSetApplicationContext(da,&user));
  PetscCall(SNESSetDM(snes,da));
  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  switch (MMS) {
  case 0: user.mms_solution = ZeroBCSolution; user.mms_forcing = NULL; break;
  case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
  case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
  case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
  case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
  default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %" PetscInt_FMT,MMS);
  }
  PetscCall(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user));
  /* -fd: use finite-difference Jacobian instead of the analytic one */
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL));
  if (!flg) {
    PetscCall(DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user));
  }

  /* -obj: also supply the objective functional to SNES */
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL));
  if (flg) {
    PetscCall(DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user));
  }

  if (PetscDefined(HAVE_MATLAB_ENGINE)) {
    PetscBool matlab_function = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0));
    if (matlab_function) {
      PetscCall(VecDuplicate(x,&r));
      PetscCall(SNESSetFunction(snes,r,FormFunctionMatlab,&user));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  PetscCall(FormInitialGuess(da,&user,x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes,NULL,x));
  PetscCall(SNESGetIterationNumber(snes,&its));

  /* sanity check: total linear iterations reported by SNES and KSP must agree */
  PetscCall(SNESGetLinearSolveIterations(snes,&slits));
  PetscCall(SNESGetKSP(snes,&ksp));
  PetscCall(KSPGetTotalIterations(ksp,&lits));
  PetscCheck(lits == slits,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %" PetscInt_FMT " does not match reported by KSP %" PetscInt_FMT,slits,lits);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     If using MMS, check the l_2 error
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (setMMS) {
    Vec       e;
    PetscReal errorl2, errorinf;
    PetscInt  N;

    PetscCall(VecDuplicate(x, &e));
    PetscCall(PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view"));
    PetscCall(FormExactSolution(da, &user, e));
    PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view"));
    PetscCall(VecAXPY(e, -1.0, x));
    PetscCall(PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view"));
    PetscCall(VecNorm(e, NORM_2, &errorl2));
    PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
    PetscCall(VecGetSize(e, &N));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %" PetscInt_FMT " error L2 %g inf %g\n", N, (double)(errorl2/PetscSqrtReal((PetscReal)N)), (double) errorinf));
    PetscCall(VecDestroy(&e));
    PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
    PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N)));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
730 
731 /*TEST
732 
733    test:
734      suffix: asm_0
735      requires: !single
736      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
737 
738    test:
739      suffix: msm_0
740      requires: !single
741      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
742 
743    test:
744      suffix: asm_1
745      requires: !single
746      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
747 
748    test:
749      suffix: msm_1
750      requires: !single
751      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
752 
753    test:
754      suffix: asm_2
755      requires: !single
756      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
757 
758    test:
759      suffix: msm_2
760      requires: !single
761      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
762 
763    test:
764      suffix: asm_3
765      requires: !single
766      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
767 
768    test:
769      suffix: msm_3
770      requires: !single
771      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
772 
773    test:
774      suffix: asm_4
775      requires: !single
776      nsize: 2
777      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
778 
779    test:
780      suffix: msm_4
781      requires: !single
782      nsize: 2
783      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
784 
785    test:
786      suffix: asm_5
787      requires: !single
788      nsize: 2
789      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
790 
791    test:
792      suffix: msm_5
793      requires: !single
794      nsize: 2
795      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
796 
797    test:
798      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
799      requires: !single
800 
801    test:
802      suffix: 2
803      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
804 
805    test:
806      suffix: 3
807      nsize: 2
808      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
809      filter: grep -v "otal number of function evaluations"
810 
811    test:
812      suffix: 4
813      nsize: 2
814      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
815 
816    test:
817      suffix: 5_anderson
818      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
819 
820    test:
821      suffix: 5_aspin
822      nsize: 4
823      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
824 
825    test:
826      suffix: 5_broyden
827      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
828 
829    test:
830      suffix: 5_fas
831      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
832      requires: !single
833 
834    test:
835      suffix: 5_fas_additive
836      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
837 
838    test:
839      suffix: 5_fas_monitor
840      args: -da_refine 1 -snes_type fas -snes_fas_monitor
841      requires: !single
842 
843    test:
844      suffix: 5_ls
845      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
846 
847    test:
848      suffix: 5_ls_sell_sor
849      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
850      output_file: output/ex5_5_ls.out
851 
852    test:
853      suffix: 5_nasm
854      nsize: 4
855      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
856 
857    test:
858      suffix: 5_ncg
859      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
860 
861    test:
862      suffix: 5_newton_asm_dmda
863      nsize: 4
864      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
865      requires: !single
866 
867    test:
868      suffix: 5_newton_gasm_dmda
869      nsize: 4
870      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
871      requires: !single
872 
873    test:
874      suffix: 5_ngmres
875      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
876 
877    test:
878      suffix: 5_ngmres_fas
879      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
880 
881    test:
882      suffix: 5_ngmres_ngs
883      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
884 
885    test:
886      suffix: 5_ngmres_nrichardson
887      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
888      output_file: output/ex5_5_ngmres_richardson.out
889 
890    test:
891      suffix: 5_nrichardson
892      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
893 
894    test:
895      suffix: 5_qn
896      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
897 
898    test:
899      suffix: 6
900      nsize: 4
901      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
902 
903    test:
904      requires: complex !single
905      suffix: complex
906      args: -snes_mf_operator -mat_mffd_complex -snes_monitor
907 
908 TEST*/
909