/* Runtime help text printed by -help; describes the problem and its key options. */
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
      that MMS3 will be evaluated with 2^m_par, 2^n_par";

/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.

    This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
    as SNESSetDM() is provided. Example usage

    ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
          -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

    or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
         multigrid levels, it will be determined automatically based on the number of refinements done)

    ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
          -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.
Note that this
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>
#include <petscmatlab.h>
#include <petsc/private/snesimpl.h> /* For SNES_Solve event */

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param; /* test problem parameter (Bratu lambda, 0 <= param <= 6.81) */
  PetscInt  m, n;  /* MMS3 parameters: sine frequencies 2^m_par, 2^n_par */
  PetscErrorCode (*mms_solution)(AppCtx *, const DMDACoor2d *, PetscScalar *); /* manufactured solution u(x,y) at one point */
  PetscErrorCode (*mms_forcing)(AppCtx *, const DMDACoor2d *, PetscScalar *);  /* matching forcing term f(x,y); may be NULL (MMS 0) */
};

/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   da - The DM
   user - user-defined application context

   Output Parameter:
   X - vector
*/
static PetscErrorCode FormInitialGuess(DM da, AppCtx *user, Vec X)
{
  PetscInt      i, j, Mx, My, xs, ys, xm, ym;
  PetscReal     lambda, temp1, temp, hx, hy;
  PetscScalar **x;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0 / (PetscReal)(Mx - 1); /* uniform grid spacing on the unit square */
  hy     = 1.0 / (PetscReal)(My - 1);
  temp1  = lambda / (lambda + 1.0);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array. Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  PetscCall(DMDAVecGetArray(da, X, &x));

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys - starting grid indices (no ghost points)
       xm, ym - widths of local grid (no ghost points)
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    temp = (PetscReal)(PetscMin(j, My - j - 1)) * hy; /* distance to nearest horizontal boundary */
    for (i = xs; i < xs + xm; i++) {
      if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
        /* boundary conditions are all zero Dirichlet */
        x[j][i] = 0.0;
      } else {
        /* sqrt of distance to the nearest boundary, scaled by lambda/(lambda+1) */
        x[j][i] = temp1 * PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i, Mx - i - 1)) * hx, temp));
      }
    }
  }

  /*
     Restore vector
  */
  PetscCall(DMDAVecRestoreArray(da, X, &x));
  PetscFunctionReturn(0);
}

/*
   FormExactSolution - Forms MMS solution by sampling user->mms_solution at the DMDA coordinates

   Input Parameters:
   da - The DM
   user - user-defined application context

   Output Parameter:
   U - vector holding the exact (manufactured) solution
*/
static PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
{
  DM            coordDA;
  Vec           coordinates;
  DMDACoor2d  **coords;
  PetscScalar **u;
  PetscInt      xs, ys, xm, ym, i, j;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
  PetscCall(DMGetCoordinateDM(da, &coordDA));
  PetscCall(DMGetCoordinates(da, &coordinates));
  PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
  PetscCall(DMDAVecGetArray(da, U, &u));
  for (j = ys; j < ys + ym; ++j) {
    for (i = xs; i < xs + xm; ++i) user->mms_solution(user, &coords[j][i], &u[j][i]);
  }
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
  PetscFunctionReturn(0);
}

/* Homogeneous Dirichlet boundary value u = 0 (used when -mms 0) */
static PetscErrorCode ZeroBCSolution(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
{
  u[0] = 0.;
  return 0;
}

/* The
functions below evaluate the MMS solution u(x,y) and associated forcing

     f(x,y) = -u_xx - u_yy - lambda exp(u)

  such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
*/

/* MMS 1: polynomial bump u = x(1-x) y(1-y), zero on the boundary */
static PetscErrorCode MMSSolution1(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0]        = x * (1 - x) * y * (1 - y);
  PetscLogFlops(5);
  return 0;
}
static PetscErrorCode MMSForcing1(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0]        = 2 * x * (1 - x) + 2 * y * (1 - y) - user->param * PetscExpReal(x * (1 - x) * y * (1 - y));
  return 0;
}

/* MMS 2: product of sines u = sin(pi x) sin(pi y) */
static PetscErrorCode MMSSolution2(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0]        = PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y);
  PetscLogFlops(5);
  return 0;
}
static PetscErrorCode MMSForcing2(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0]        = 2 * PetscSqr(PETSC_PI) * PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y) - user->param * PetscExpReal(PetscSinReal(PETSC_PI * x) * PetscSinReal(PETSC_PI * y));
  return 0;
}

/* MMS 3: oscillatory solution with frequencies user->m, user->n (set from -m_par/-n_par) */
static PetscErrorCode MMSSolution3(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0]        = PetscSinReal(user->m * PETSC_PI * x * (1 - y)) * PetscSinReal(user->n * PETSC_PI * y * (1 - x));
  PetscLogFlops(5);
  return 0;
}
static PetscErrorCode MMSForcing3(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  /* closed-form -u_xx - u_yy - lambda exp(u) for the MMS3 solution above */
  f[0] = (-(PetscExpReal(PetscSinReal(m * PETSC_PI * x * (1 - y)) * PetscSinReal(n * PETSC_PI * (1 - x) * y)) * lambda) + PetscSqr(PETSC_PI) * (-2 * m * n * ((-1 + x) * x + (-1 + y) * y) * PetscCosReal(m * PETSC_PI * x * (-1 + y)) * PetscCosReal(n * PETSC_PI * (-1 + x) * y) + (PetscSqr(m) * (PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n) * (PetscSqr(-1 + x) + PetscSqr(y))) * PetscSinReal(m * PETSC_PI * x * (-1 + y)) * PetscSinReal(n * PETSC_PI * (-1 + x) * y)));
  return 0;
}

/* MMS 4: quartic solution u = (x^4 - Lx^2 x^2)(y^4 - Ly^2 y^2) */
static PetscErrorCode MMSSolution4(AppCtx *user, const DMDACoor2d *c, PetscScalar *u)
{
  const PetscReal Lx = 1., Ly = 1.;
  PetscReal       x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0]              = (PetscPowReal(x, 4) - PetscSqr(Lx) * PetscSqr(x)) * (PetscPowReal(y, 4) - PetscSqr(Ly) * PetscSqr(y));
  PetscLogFlops(9);
  return 0;
}
static PetscErrorCode MMSForcing4(AppCtx *user, const DMDACoor2d *c, PetscScalar *f)
{
  const PetscReal Lx = 1., Ly = 1.;
  PetscReal       x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0]              = (2 * PetscSqr(x) * (PetscSqr(x) - PetscSqr(Lx)) * (PetscSqr(Ly) - 6 * PetscSqr(y)) + 2 * PetscSqr(y) * (PetscSqr(Lx) - 6 * PetscSqr(x)) * (PetscSqr(y) - PetscSqr(Ly)) - user->param * PetscExpReal((PetscPowReal(x, 4) - PetscSqr(Lx) * PetscSqr(x)) * (PetscPowReal(y, 4) - PetscSqr(Ly) * PetscSqr(y))));
  return 0;
}

/* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch

*/
static PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, PetscScalar **x, PetscScalar **f, AppCtx *user)
{
  PetscInt    i, j;
  PetscReal   lambda, hx, hy, hxdhy, hydhx;
  PetscScalar u, ue, uw, un, us, uxx, uyy, mms_solution, mms_forcing;
  DMDACoor2d  c;

  PetscFunctionBeginUser;
  lambda = user->param;
  hx     = 1.0 / (PetscReal)(info->mx - 1);
  hy     = 1.0 / (PetscReal)(info->my - 1);
  hxdhy  = hx / hy; /* stencil scale factors for the 5-point Laplacian */
  hydhx  = hy / hx;
  /*
     Compute function over the locally owned part of the grid
*/
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
        /* Dirichlet boundary row: residual drives x toward the MMS boundary value,
           scaled to match the diagonal of interior rows */
        c.x = i * hx;
        c.y = j * hy;
        PetscCall(user->mms_solution(user, &c, &mms_solution));
        f[j][i] = 2.0 * (hydhx + hxdhy) * (x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i - 1];
        ue = x[j][i + 1];
        un = x[j - 1][i];
        us = x[j + 1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i - 1 == 0) {
          c.x = (i - 1) * hx;
          c.y = j * hy;
          PetscCall(user->mms_solution(user, &c, &uw));
        }
        if (i + 1 == info->mx - 1) {
          c.x = (i + 1) * hx;
          c.y = j * hy;
          PetscCall(user->mms_solution(user, &c, &ue));
        }
        if (j - 1 == 0) {
          c.x = i * hx;
          c.y = (j - 1) * hy;
          PetscCall(user->mms_solution(user, &c, &un));
        }
        if (j + 1 == info->my - 1) {
          c.x = i * hx;
          c.y = (j + 1) * hy;
          PetscCall(user->mms_solution(user, &c, &us));
        }

        /* 5-point stencil discretization of -Laplacian u, scaled by hx*hy */
        uxx         = (2.0 * u - uw - ue) * hydhx;
        uyy         = (2.0 * u - un - us) * hxdhy;
        mms_forcing = 0;
        c.x         = i * hx;
        c.y         = j * hy;
        if (user->mms_forcing) PetscCall(user->mms_forcing(user, &c, &mms_forcing));
        f[j][i] = uxx + uyy - hx * hy * (lambda * PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscCall(PetscLogFlops(11.0 * info->ym * info->xm));
  PetscFunctionReturn(0);
}

/* FormObjectiveLocal - Evaluates the objective functional whose gradient is the residual, on the local process patch */
static PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info, PetscScalar **x, PetscReal *obj, AppCtx *user)
{
  PetscInt    i, j;
  PetscReal   lambda, hx, hy, hxdhy, hydhx, sc, lobj = 0; /* lobj accumulates the local contribution */
  PetscScalar u, ue, uw, un, us, uxux, uyuy;
  MPI_Comm    comm;

  PetscFunctionBeginUser;
  *obj = 0;
  PetscCall(PetscObjectGetComm((PetscObject)info->da, &comm));
  lambda = user->param;
  hx     =
1.0 / (PetscReal)(info->mx - 1);
  hy     = 1.0 / (PetscReal)(info->my - 1);
  sc     = hx * hy * lambda;
  hxdhy  = hx / hy;
  hydhx  = hy / hx;
  /*
     Compute the objective over the locally owned part of the grid
  */
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
        /* boundary term matching the scaled Dirichlet residual in FormFunctionLocal */
        lobj += PetscRealPart((hydhx + hxdhy) * x[j][i] * x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i - 1];
        ue = x[j][i + 1];
        un = x[j - 1][i];
        us = x[j + 1][i];

        /* neighbors on the boundary contribute their (zero) Dirichlet value */
        if (i - 1 == 0) uw = 0.;
        if (i + 1 == info->mx - 1) ue = 0.;
        if (j - 1 == 0) un = 0.;
        if (j + 1 == info->my - 1) us = 0.;

        /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */

        uxux = u * (2. * u - ue - uw) * hydhx;
        uyuy = u * (2. * u - un - us) * hxdhy;

        lobj += PetscRealPart(0.5 * (uxux + uyuy) - sc * PetscExpScalar(u));
      }
    }
  }
  PetscCall(PetscLogFlops(12.0 * info->ym * info->xm));
  /* sum the per-process contributions into the global objective */
  PetscCallMPI(MPI_Allreduce(&lobj, obj, 1, MPIU_REAL, MPIU_SUM, comm));
  PetscFunctionReturn(0);
}

/*
   FormJacobianLocal - Evaluates Jacobian matrix on local process patch
*/
static PetscErrorCode FormJacobianLocal(DMDALocalInfo *info, PetscScalar **x, Mat jac, Mat jacpre, AppCtx *user)
{
  PetscInt     i, j, k;
  MatStencil   col[5], row;
  PetscScalar  lambda, v[5], hx, hy, hxdhy, hydhx, sc;
  DM           coordDA;
  Vec          coordinates;
  DMDACoor2d **coords;

  PetscFunctionBeginUser;
  lambda = user->param;
  /* Extract coordinates: derive the grid spacing from the stored DMDA coordinates
     (falls back to 1.0 when the local patch is a single point in that direction) */
  PetscCall(DMGetCoordinateDM(info->da, &coordDA));
  PetscCall(DMGetCoordinates(info->da, &coordinates));
  PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
  hx = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs + 1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
  hy = info->ym > 1 ?
PetscRealPart(coords[info->ys + 1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0; 370 PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords)); 371 hxdhy = hx / hy; 372 hydhx = hy / hx; 373 sc = hx * hy * lambda; 374 375 /* 376 Compute entries for the locally owned part of the Jacobian. 377 - Currently, all PETSc parallel matrix formats are partitioned by 378 contiguous chunks of rows across the processors. 379 - Each processor needs to insert only elements that it owns 380 locally (but any non-local elements will be sent to the 381 appropriate processor during matrix assembly). 382 - Here, we set all entries for a particular row at once. 383 - We can set matrix entries either using either 384 MatSetValuesLocal() or MatSetValues(), as discussed above. 385 */ 386 for (j = info->ys; j < info->ys + info->ym; j++) { 387 for (i = info->xs; i < info->xs + info->xm; i++) { 388 row.j = j; 389 row.i = i; 390 /* boundary points */ 391 if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) { 392 v[0] = 2.0 * (hydhx + hxdhy); 393 PetscCall(MatSetValuesStencil(jacpre, 1, &row, 1, &row, v, INSERT_VALUES)); 394 } else { 395 k = 0; 396 /* interior grid points */ 397 if (j - 1 != 0) { 398 v[k] = -hxdhy; 399 col[k].j = j - 1; 400 col[k].i = i; 401 k++; 402 } 403 if (i - 1 != 0) { 404 v[k] = -hydhx; 405 col[k].j = j; 406 col[k].i = i - 1; 407 k++; 408 } 409 410 v[k] = 2.0 * (hydhx + hxdhy) - sc * PetscExpScalar(x[j][i]); 411 col[k].j = row.j; 412 col[k].i = row.i; 413 k++; 414 415 if (i + 1 != info->mx - 1) { 416 v[k] = -hydhx; 417 col[k].j = j; 418 col[k].i = i + 1; 419 k++; 420 } 421 if (j + 1 != info->mx - 1) { 422 v[k] = -hxdhy; 423 col[k].j = j + 1; 424 col[k].i = i; 425 k++; 426 } 427 PetscCall(MatSetValuesStencil(jacpre, 1, &row, k, col, v, INSERT_VALUES)); 428 } 429 } 430 } 431 432 /* 433 Assemble matrix, using the 2-step process: 434 MatAssemblyBegin(), MatAssemblyEnd(). 
*/
  PetscCall(MatAssemblyBegin(jacpre, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jacpre, MAT_FINAL_ASSEMBLY));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(0);
}

/* FormFunctionMatlab - residual evaluation delegated to a MATLAB engine
   (script ex5m); compiled to a no-op unless PETSc was built with MATLAB */
static PetscErrorCode FormFunctionMatlab(SNES snes, Vec X, Vec F, void *ptr)
{
#if PetscDefined(HAVE_MATLAB)
  AppCtx   *user = (AppCtx *)ptr;
  PetscInt  Mx, My;
  PetscReal lambda, hx, hy;
  Vec       localX, localF;
  MPI_Comm  comm;
  DM        da;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMGetLocalVector(da, &localF));
  /* names are how the MATLAB workspace refers to these vectors */
  PetscCall(PetscObjectSetName((PetscObject)localX, "localX"));
  PetscCall(PetscObjectSetName((PetscObject)localF, "localF"));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0 / (PetscReal)(Mx - 1);
  hy     = 1.0 / (PetscReal)(My - 1);

  PetscCall(PetscObjectGetComm((PetscObject)snes, &comm));
  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
*/
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
  /* push localX to MATLAB, run the ex5m script, pull the residual back */
  PetscCall(PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm), (PetscObject)localX));
  PetscCall(PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm), "localF=ex5m(localX,%18.16e,%18.16e,%18.16e)", (double)hx, (double)hy, (double)lambda));
  PetscCall(PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm), (PetscObject)localF));

  /*
     Insert values into global vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localF, INSERT_VALUES, F));
  PetscCall(DMLocalToGlobalEnd(da, localF, INSERT_VALUES, F));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscCall(DMRestoreLocalVector(da, &localF));
  PetscFunctionReturn(0);
#else
  return 0; /* Never called */
#endif
}

/* ------------------------------------------------------------------- */
/*
      Applies some sweeps on nonlinear Gauss-Seidel on each process

 */
static PetscErrorCode NonlinearGS(SNES snes, Vec X, Vec B, void *ctx)
{
  PetscInt      i, j, k, Mx, My, xs, ys, xm, ym, its, tot_its, sweeps, l;
  PetscReal     lambda, hx, hy, hxdhy, hydhx, sc;
  PetscScalar **x, **b, bij, F, F0 = 0, J, u, un, us, ue, eu, uw, uxx, uyy, y;
  PetscReal     atol, rtol, stol;
  DM            da;
  AppCtx       *user;
  Vec           localX, localB;

  PetscFunctionBeginUser;
  tot_its = 0; /* total pointwise Newton iterations, for flop accounting */
  PetscCall(SNESNGSGetSweeps(snes, &sweeps));
  PetscCall(SNESNGSGetTolerances(snes, &atol, &rtol, &stol, &its));
  PetscCall(SNESGetDM(snes, &da));
  PetscCall(DMGetApplicationContext(da, &user));

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  lambda = user->param;
  hx     = 1.0 / (PetscReal)(Mx - 1);
  hy     = 1.0 / (PetscReal)(My - 1);
  sc     = hx * hy * lambda;
  hxdhy  = hx / hy;
  hydhx  = hy / hx;
524 525 PetscCall(DMGetLocalVector(da, &localX)); 526 if (B) PetscCall(DMGetLocalVector(da, &localB)); 527 for (l = 0; l < sweeps; l++) { 528 PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX)); 529 PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX)); 530 if (B) { 531 PetscCall(DMGlobalToLocalBegin(da, B, INSERT_VALUES, localB)); 532 PetscCall(DMGlobalToLocalEnd(da, B, INSERT_VALUES, localB)); 533 } 534 /* 535 Get a pointer to vector data. 536 - For default PETSc vectors, VecGetArray() returns a pointer to 537 the data array. Otherwise, the routine is implementation dependent. 538 - You MUST call VecRestoreArray() when you no longer need access to 539 the array. 540 */ 541 PetscCall(DMDAVecGetArray(da, localX, &x)); 542 if (B) PetscCall(DMDAVecGetArray(da, localB, &b)); 543 /* 544 Get local grid boundaries (for 2-dimensional DMDA): 545 xs, ys - starting grid indices (no ghost points) 546 xm, ym - widths of local grid (no ghost points) 547 */ 548 PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL)); 549 550 for (j = ys; j < ys + ym; j++) { 551 for (i = xs; i < xs + xm; i++) { 552 if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) { 553 /* boundary conditions are all zero Dirichlet */ 554 x[j][i] = 0.0; 555 } else { 556 if (B) bij = b[j][i]; 557 else bij = 0.; 558 559 u = x[j][i]; 560 un = x[j - 1][i]; 561 us = x[j + 1][i]; 562 ue = x[j][i - 1]; 563 uw = x[j][i + 1]; 564 565 for (k = 0; k < its; k++) { 566 eu = PetscExpScalar(u); 567 uxx = (2.0 * u - ue - uw) * hydhx; 568 uyy = (2.0 * u - un - us) * hxdhy; 569 F = uxx + uyy - sc * eu - bij; 570 if (k == 0) F0 = F; 571 J = 2.0 * (hydhx + hxdhy) - sc * eu; 572 y = F / J; 573 u -= y; 574 tot_its++; 575 576 if (atol > PetscAbsReal(PetscRealPart(F)) || rtol * PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) || stol * PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) break; 577 } 578 x[j][i] = u; 579 } 580 } 581 } 582 /* 583 Restore vector 584 */ 585 
PetscCall(DMDAVecRestoreArray(da, localX, &x));
    PetscCall(DMLocalToGlobalBegin(da, localX, INSERT_VALUES, X));
    PetscCall(DMLocalToGlobalEnd(da, localX, INSERT_VALUES, X));
  }
  /* ~21 flops per pointwise Newton iteration */
  PetscCall(PetscLogFlops(tot_its * (21.0)));
  PetscCall(DMRestoreLocalVector(da, &localX));
  if (B) {
    PetscCall(DMDAVecRestoreArray(da, localB, &b));
    PetscCall(DMRestoreLocalVector(da, &localB));
  }
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  SNES      snes; /* nonlinear solver */
  Vec       x;    /* solution vector */
  AppCtx    user; /* user-defined work context */
  PetscInt  its;  /* iterations for convergence */
  PetscReal bratu_lambda_max = 6.81; /* above this the continuous problem has no solution */
  PetscReal bratu_lambda_min = 0.;
  PetscInt  MMS              = 1;
  PetscBool flg              = PETSC_FALSE, setMMS;
  DM        da;
  Vec       r = NULL;
  KSP       ksp;
  PetscInt  lits, slits;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-par", &user.param, NULL));
  PetscCheck(user.param <= bratu_lambda_max && user.param >= bratu_lambda_min, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Lambda, %g, is out of range, [%g, %g]", (double)user.param, (double)bratu_lambda_min, (double)bratu_lambda_max);
  /* setMMS records whether -mms was given explicitly (enables the error check below) */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-mms", &MMS, &setMMS));
  if (MMS == 3) {
    PetscInt mPar = 2, nPar = 1;
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-m_par", &mPar, NULL));
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-n_par", &nPar, NULL));
    user.m = PetscPowInt(2, mPar);
    user.n = PetscPowInt(2, nPar);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
  PetscCall(SNESSetCountersReset(snes, PETSC_FALSE));
  PetscCall(SNESSetNGS(snes, NonlinearGS, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  PetscCall(DMSetApplicationContext(da, &user));
  PetscCall(SNESSetDM(snes, da));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine: select which manufactured
     solution/forcing pair the callbacks will use
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  switch (MMS) {
  case 0:
    user.mms_solution = ZeroBCSolution;
    user.mms_forcing  = NULL;
    break;
  case 1:
    user.mms_solution = MMSSolution1;
    user.mms_forcing  = MMSForcing1;
    break;
  case 2:
    user.mms_solution = MMSSolution2;
    user.mms_forcing  = MMSForcing2;
    break;
  case 3:
    user.mms_solution = MMSSolution3;
    user.mms_forcing  = MMSForcing3;
    break;
  case 4:
    user.mms_solution = MMSSolution4;
    user.mms_forcing  = MMSForcing4;
    break;
  default:
SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Unknown MMS type %" PetscInt_FMT, MMS);
  }
  PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (DMDASNESFunction)FormFunctionLocal, &user));
  /* -fd switches to finite-difference Jacobian instead of the analytic one */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-fd", &flg, NULL));
  if (!flg) PetscCall(DMDASNESSetJacobianLocal(da, (DMDASNESJacobian)FormJacobianLocal, &user));

  /* -obj enables the objective functional (for line searches etc.) */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-obj", &flg, NULL));
  if (flg) PetscCall(DMDASNESSetObjectiveLocal(da, (DMDASNESObjective)FormObjectiveLocal, &user));

  if (PetscDefined(HAVE_MATLAB)) {
    PetscBool matlab_function = PETSC_FALSE;
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-matlab_function", &matlab_function, 0));
    if (matlab_function) {
      PetscCall(VecDuplicate(x, &r));
      PetscCall(SNESSetFunction(snes, r, FormFunctionMatlab, &user));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  PetscCall(FormInitialGuess(da, &user, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes, NULL, x));
  PetscCall(SNESGetIterationNumber(snes, &its));

  /* sanity check: SNES and KSP must agree on the total linear iteration count */
  PetscCall(SNESGetLinearSolveIterations(snes, &slits));
  PetscCall(SNESGetKSP(snes, &ksp));
  PetscCall(KSPGetTotalIterations(ksp, &lits));
  PetscCheck(lits == slits, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Number of total linear iterations reported by SNES %" PetscInt_FMT " does not match reported by KSP %" PetscInt_FMT, slits, lits);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
     If using MMS, check the l_2 error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (setMMS) {
    Vec       e;
    PetscReal errorl2, errorinf;
    PetscInt  N;

    /* e = exact - computed; report discrete L2 (norm scaled by sqrt(N)) and max norms */
    PetscCall(VecDuplicate(x, &e));
    PetscCall(PetscObjectViewFromOptions((PetscObject)x, NULL, "-sol_view"));
    PetscCall(FormExactSolution(da, &user, e));
    PetscCall(PetscObjectViewFromOptions((PetscObject)e, NULL, "-exact_view"));
    PetscCall(VecAXPY(e, -1.0, x));
    PetscCall(PetscObjectViewFromOptions((PetscObject)e, NULL, "-error_view"));
    PetscCall(VecNorm(e, NORM_2, &errorl2));
    PetscCall(VecNorm(e, NORM_INFINITY, &errorinf));
    PetscCall(VecGetSize(e, &N));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "N: %" PetscInt_FMT " error L2 %g inf %g\n", N, (double)(errorl2 / PetscSqrtReal((PetscReal)N)), (double)errorinf));
    PetscCall(VecDestroy(&e));
    /* record problem size and error on the SNES_Solve event for -log_view convergence studies */
    PetscCall(PetscLogEventSetDof(SNES_Solve, 0, N));
    PetscCall(PetscLogEventSetError(SNES_Solve, 0, errorl2 / PetscSqrtReal(N)));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: asm_0
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu

   test:
      suffix: msm_0
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu

   test:
      suffix: asm_1
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_1
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

   test:
      suffix: asm_2
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_2
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type
multiplicative -sub_pc_type lu -da_grid_x 8 786 787 test: 788 suffix: asm_3 789 requires: !single 790 args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8 791 792 test: 793 suffix: msm_3 794 requires: !single 795 args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8 796 797 test: 798 suffix: asm_4 799 requires: !single 800 nsize: 2 801 args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8 802 803 test: 804 suffix: msm_4 805 requires: !single 806 nsize: 2 807 args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8 808 809 test: 810 suffix: asm_5 811 requires: !single 812 nsize: 2 813 args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8 814 815 test: 816 suffix: msm_5 817 requires: !single 818 nsize: 2 819 args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8 820 821 test: 822 args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 
-pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full 823 requires: !single 824 825 test: 826 suffix: 2 827 args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1. 828 829 test: 830 suffix: 3 831 nsize: 2 832 args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2 833 filter: grep -v "otal number of function evaluations" 834 835 test: 836 suffix: 4 837 nsize: 2 838 args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1 839 840 test: 841 suffix: 5_anderson 842 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson 843 844 test: 845 suffix: 5_aspin 846 nsize: 4 847 args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view 848 849 test: 850 suffix: 5_broyden 851 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10 852 853 test: 854 suffix: 5_fas 855 args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 856 requires: !single 857 858 test: 859 suffix: 5_fas_additive 860 args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50 861 862 test: 863 suffix: 5_fas_monitor 864 args: -da_refine 1 -snes_type fas -snes_fas_monitor 865 
requires: !single 866 867 test: 868 suffix: 5_ls 869 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls 870 871 test: 872 suffix: 5_ls_sell_sor 873 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor 874 output_file: output/ex5_5_ls.out 875 876 test: 877 suffix: 5_nasm 878 nsize: 4 879 args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10 880 881 test: 882 suffix: 5_ncg 883 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr 884 885 test: 886 suffix: 5_newton_asm_dmda 887 nsize: 4 888 args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump 889 requires: !single 890 891 test: 892 suffix: 5_newton_gasm_dmda 893 nsize: 4 894 args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump 895 requires: !single 896 897 test: 898 suffix: 5_ngmres 899 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 900 901 test: 902 suffix: 5_ngmres_fas 903 args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6 904 905 test: 906 suffix: 5_ngmres_ngs 907 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1 908 909 test: 910 suffix: 5_ngmres_nrichardson 911 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 
-npc_snes_type nrichardson -npc_snes_max_it 3 912 output_file: output/ex5_5_ngmres_richardson.out 913 914 test: 915 suffix: 5_nrichardson 916 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson 917 918 test: 919 suffix: 5_qn 920 args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10 921 922 test: 923 suffix: 6 924 nsize: 4 925 args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2 926 927 test: 928 requires: complex !single 929 suffix: complex 930 args: -snes_mf_operator -mat_mffd_complex -snes_monitor 931 932 TEST*/ 933