static char help[] = "Tests recovery from domain errors in MatMult() and PCApply()\n\n";

/*
   See src/ksp/ksp/tutorials/ex19.c from which this was copied.

   This test drives the 2d driven-cavity solver while deliberately injecting
   failures (Inf vectors, zeroed Jacobians, or SNESSetFunctionDomainError())
   at chosen points, selected by command-line flags, to verify that the
   SNES/KSP solvers detect the breakdown and report a clean divergence
   instead of crashing.
*/

#include <petscsnes.h>
#include <petscdm.h>
#include <petscdmda.h>

/*
   User-defined routines and data structures
*/

/* One grid point of the driven-cavity solution: velocities, vorticity, temperature */
typedef struct {
  PetscScalar u, v, omega, temp;
} Field;

PetscErrorCode FormFunctionLocal(DMDALocalInfo *, Field **, Field **, void *);

/* Application context shared by main() and the local function evaluation */
typedef struct {
  PetscReal lidvelocity, prandtl, grashof; /* physical parameters */
  PetscBool draw_contours;                 /* flag - 1 indicates drawing contours */
  PetscBool errorindomain;                 /* inject SNESSetFunctionDomainError() after many function calls */
  PetscBool errorindomainmf;               /* inject SNESSetFunctionDomainError() early (matrix-free case) */
  SNES      snes;                          /* kept so FormFunctionLocal() can flag the domain error */
} AppCtx;

/* Context for the shell matrix that wraps the matrix-free (MFFD) Jacobian */
typedef struct {
  Mat Jmf;
} MatShellCtx;

extern PetscErrorCode FormInitialGuess(AppCtx *, DM, Vec);
extern PetscErrorCode MatMult_MyShell(Mat, Vec, Vec);
extern PetscErrorCode MatAssemblyEnd_MyShell(Mat, MatAssemblyType);
extern PetscErrorCode PCApply_MyShell(PC, Vec, Vec);
extern PetscErrorCode SNESComputeJacobian_MyShell(SNES, Vec, Mat, Mat, void *);

int main(int argc, char **argv) {
  AppCtx      user; /* user-defined work context */
  PetscInt    mx, my;
  MPI_Comm    comm;
  DM          da;
  Vec         x;
  Mat         J = NULL, Jmf = NULL;
  MatShellCtx matshellctx;
  PetscInt    mlocal, nlocal;
  PC          pc;
  KSP         ksp;
  PetscBool   errorinmatmult = PETSC_FALSE, errorinpcapply = PETSC_FALSE, errorinpcsetup = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  /* Each flag selects one failure-injection mode; they are exercised one at a
     time by the test harness at the bottom of this file. */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_matmult", &errorinmatmult, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_pcapply", &errorinpcapply, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_pcsetup", &errorinpcsetup, NULL));
  user.errorindomain = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_domain", &user.errorindomain, NULL));
  user.errorindomainmf = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-error_in_domainmf", &user.errorindomainmf, NULL));

  comm = PETSC_COMM_WORLD;
  PetscCall(SNESCreate(comm, &user.snes));

  /*
     Create distributed array object to manage parallel grid and vectors
     for principal unknowns (x) and governing residuals (f)
  */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 4, 1, 0, 0, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(SNESSetDM(user.snes, da));

  PetscCall(DMDAGetInfo(da, 0, &mx, &my, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /*
     Problem parameters (velocity of lid, prandtl, and grashof numbers)
  */
  user.lidvelocity = 1.0 / (mx * my);
  user.prandtl     = 1.0;
  user.grashof     = 1.0;

  PetscCall(PetscOptionsGetReal(NULL, NULL, "-lidvelocity", &user.lidvelocity, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-prandtl", &user.prandtl, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-grashof", &user.grashof, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-contours", &user.draw_contours));

  PetscCall(DMDASetFieldName(da, 0, "x_velocity"));
  PetscCall(DMDASetFieldName(da, 1, "y_velocity"));
  PetscCall(DMDASetFieldName(da, 2, "Omega"));
  PetscCall(DMDASetFieldName(da, 3, "temperature"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create user context, set problem data, create vector data structures.
     Also, compute the initial guess.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetApplicationContext(da, &user));
  PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (PetscErrorCode(*)(DMDALocalInfo *, void *, void *, void *))FormFunctionLocal, &user));

  if (errorinmatmult) {
    /* Wrap the matrix-free Jacobian in a shell whose MatMult() injects an
       Inf into the result after a few applications (see MatMult_MyShell). */
    PetscCall(MatCreateSNESMF(user.snes, &Jmf));
    PetscCall(MatSetFromOptions(Jmf));
    PetscCall(MatGetLocalSize(Jmf, &mlocal, &nlocal));
    matshellctx.Jmf = Jmf;
    PetscCall(MatCreateShell(PetscObjectComm((PetscObject)Jmf), mlocal, nlocal, PETSC_DECIDE, PETSC_DECIDE, &matshellctx, &J));
    PetscCall(MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_MyShell));
    PetscCall(MatShellSetOperation(J, MATOP_ASSEMBLY_END, (void (*)(void))MatAssemblyEnd_MyShell));
    PetscCall(SNESSetJacobian(user.snes, J, J, MatMFFDComputeJacobian, NULL));
  }

  PetscCall(SNESSetFromOptions(user.snes));
  PetscCall(PetscPrintf(comm, "lid velocity = %g, prandtl # = %g, grashof # = %g\n", (double)user.lidvelocity, (double)user.prandtl, (double)user.grashof));

  if (errorinpcapply) {
    /* Identity "preconditioner" that injects an Inf after a few applications
       (see PCApply_MyShell). */
    PetscCall(SNESGetKSP(user.snes, &ksp));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCSHELL));
    PetscCall(PCShellSetApply(pc, PCApply_MyShell));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(FormInitialGuess(&user, da, x));

  if (errorinpcsetup) {
    /* SNESSetUp() first so the default DMDA Jacobian routine is installed,
       then override it with a wrapper that zeros the matrix on later calls,
       making the subsequent PCSetUp()/factorization fail. */
    PetscCall(SNESSetUp(user.snes));
    PetscCall(SNESSetJacobian(user.snes, NULL, NULL, SNESComputeJacobian_MyShell, NULL));
  }
  PetscCall(SNESSolve(user.snes, NULL, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(MatDestroy(&J));
  PetscCall(MatDestroy(&Jmf));
  PetscCall(VecDestroy(&x));
  PetscCall(DMDestroy(&da));
  PetscCall(SNESDestroy(&user.snes));
  PetscCall(PetscFinalize());
  return 0;
}

/*
   FormInitialGuess - Forms initial approximation: motionless fluid and a
   linear temperature profile in x (only when grashof > 0).

   Input Parameters:
   user - user-defined application context
   da   - distributed array managing the grid
   X    - vector

   Output Parameter:
   X - vector
*/
PetscErrorCode FormInitialGuess(AppCtx *user, DM da, Vec X) {
  PetscInt  i, j, mx, xs, ys, xm, ym;
  PetscReal grashof, dx;
  Field   **x;

  PetscFunctionBeginUser;
  grashof = user->grashof;

  PetscCall(DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  dx = 1.0 / (mx - 1); /* uniform grid spacing in x */

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys - starting grid indices (no ghost points)
       xm, ym - widths of local grid (no ghost points)
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  PetscCall(DMDAVecGetArray(da, X, &x));

  /*
     Compute initial guess over the locally owned part of the grid
     Initial condition is motionless fluid and equilibrium temperature
  */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      x[j][i].u     = 0.0;
      x[j][i].v     = 0.0;
      x[j][i].omega = 0.0;
      /* (grashof > 0) evaluates to 0 or 1: linear temperature ramp only
         when buoyancy is active */
      x[j][i].temp = (grashof > 0) * i * dx;
    }
  }

  /*
     Restore vector
  */
  PetscCall(DMDAVecRestoreArray(da, X, &x));
  PetscFunctionReturn(0);
}

/*
   FormFunctionLocal - Evaluates the nonlinear residual on the local patch
   described by info (finite-difference driven-cavity equations for u, v,
   omega, temp; boundary rows get boundary-condition residuals, interior
   rows get the upwinded convection-diffusion stencil).

   After enough calls (counted by the static `fail`), and if the matching
   -error_in_domain/-error_in_domainmf flag is set, rank 0 calls
   SNESSetFunctionDomainError() to simulate the function leaving its
   domain of definition.

   NOTE(review): `fail++` appears in BOTH operands of the ||, so when the
   first clause is false the counter advances twice per call; the recorded
   test outputs depend on this exact sequencing, so it is left as is.
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, Field **x, Field **f, void *ptr) {
  AppCtx         *user = (AppCtx *)ptr;
  PetscInt        xints, xinte, yints, yinte, i, j;
  PetscReal       hx, hy, dhx, dhy, hxdhy, hydhx;
  PetscReal       grashof, prandtl, lid;
  PetscScalar     u, uxx, uyy, vx, vy, avx, avy, vxp, vxm, vyp, vym;
  static PetscInt fail = 0; /* call counter shared across invocations; triggers the injected domain error */

  PetscFunctionBeginUser;
  if ((fail++ > 7 && user->errorindomainmf) || (fail++ > 36 && user->errorindomain)) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)user->snes), &rank));
    /* only rank 0 flags the error, testing collective error propagation */
    if (rank == 0) PetscCall(SNESSetFunctionDomainError(user->snes));
  }
  grashof = user->grashof;
  prandtl = user->prandtl;
  lid     = user->lidvelocity;

  /*
     Define mesh intervals ratios for uniform grid.

     Note: FD formulae below are normalized by multiplying through by
     local volume element (i.e. hx*hy) to obtain coefficients O(1) in two dimensions.

  */
  dhx   = (PetscReal)(info->mx - 1);
  dhy   = (PetscReal)(info->my - 1);
  hx    = 1.0 / dhx;
  hy    = 1.0 / dhy;
  hxdhy = hx * dhy;
  hydhx = hy * dhx;

  xints = info->xs;
  xinte = info->xs + info->xm;
  yints = info->ys;
  yinte = info->ys + info->ym;

  /* Test whether we are on the bottom edge of the global array */
  if (yints == 0) {
    j     = 0;
    yints = yints + 1; /* exclude this row from the interior loop below */
    /* bottom edge */
    for (i = info->xs; i < info->xs + info->xm; i++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j + 1][i].u - x[j][i].u) * dhy;
      f[j][i].temp  = x[j][i].temp - x[j + 1][i].temp;
    }
  }

  /* Test whether we are on the top edge of the global array */
  if (yinte == info->my) {
    j     = info->my - 1;
    yinte = yinte - 1; /* exclude this row from the interior loop below */
    /* top edge */
    for (i = info->xs; i < info->xs + info->xm; i++) {
      f[j][i].u     = x[j][i].u - lid; /* moving lid drives the cavity */
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j][i].u - x[j - 1][i].u) * dhy;
      f[j][i].temp  = x[j][i].temp - x[j - 1][i].temp;
    }
  }

  /* Test whether we are on the left edge of the global array */
  if (xints == 0) {
    i     = 0;
    xints = xints + 1; /* exclude this column from the interior loop below */
    /* left edge */
    for (j = info->ys; j < info->ys + info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i + 1].v - x[j][i].v) * dhx;
      f[j][i].temp  = x[j][i].temp;
    }
  }

  /* Test whether we are on the right edge of the global array */
  if (xinte == info->mx) {
    i     = info->mx - 1;
    xinte = xinte - 1; /* exclude this column from the interior loop below */
    /* right edge */
    for (j = info->ys; j < info->ys + info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i].v - x[j][i - 1].v) * dhx;
      f[j][i].temp  = x[j][i].temp - (PetscReal)(grashof > 0);
    }
  }

  /* Compute over the interior points */
  for (j = yints; j < yinte; j++) {
    for (i = xints; i < xinte; i++) {
      /*
        convective coefficients for upwinding
      */
      vx  = x[j][i].u;
      avx = PetscAbsScalar(vx);
      vxp = .5 * (vx + avx); /* positive part of vx */
      vxm = .5 * (vx - avx); /* negative part of vx */
      vy  = x[j][i].v;
      avy = PetscAbsScalar(vy);
      vyp = .5 * (vy + avy);
      vym = .5 * (vy - avy);

      /* U velocity */
      u         = x[j][i].u;
      uxx       = (2.0 * u - x[j][i - 1].u - x[j][i + 1].u) * hydhx;
      uyy       = (2.0 * u - x[j - 1][i].u - x[j + 1][i].u) * hxdhy;
      f[j][i].u = uxx + uyy - .5 * (x[j + 1][i].omega - x[j - 1][i].omega) * hx;

      /* V velocity */
      u         = x[j][i].v;
      uxx       = (2.0 * u - x[j][i - 1].v - x[j][i + 1].v) * hydhx;
      uyy       = (2.0 * u - x[j - 1][i].v - x[j + 1][i].v) * hxdhy;
      f[j][i].v = uxx + uyy + .5 * (x[j][i + 1].omega - x[j][i - 1].omega) * hy;

      /* Omega */
      u             = x[j][i].omega;
      uxx           = (2.0 * u - x[j][i - 1].omega - x[j][i + 1].omega) * hydhx;
      uyy           = (2.0 * u - x[j - 1][i].omega - x[j + 1][i].omega) * hxdhy;
      f[j][i].omega = uxx + uyy + (vxp * (u - x[j][i - 1].omega) + vxm * (x[j][i + 1].omega - u)) * hy + (vyp * (u - x[j - 1][i].omega) + vym * (x[j + 1][i].omega - u)) * hx - .5 * grashof * (x[j][i + 1].temp - x[j][i - 1].temp) * hy;

      /* Temperature */
      u            = x[j][i].temp;
      uxx          = (2.0 * u - x[j][i - 1].temp - x[j][i + 1].temp) * hydhx;
      uyy          = (2.0 * u - x[j - 1][i].temp - x[j + 1][i].temp) * hxdhy;
      f[j][i].temp = uxx + uyy + prandtl * ((vxp * (u - x[j][i - 1].temp) + vxm * (x[j][i + 1].temp - u)) * hy + (vyp * (u - x[j - 1][i].temp) + vym * (x[j + 1][i].temp - u)) * hx);
    }
  }

  /*
     Flop count (multiply-adds are counted as 2 operations)
  */
  PetscCall(PetscLogFlops(84.0 * info->ym * info->xm));
  PetscFunctionReturn(0);
}

/*
   MatMult_MyShell - Forwards the multiply to the wrapped MFFD Jacobian,
   then, starting with the 7th call (static counter), has rank 0 overwrite
   the result with Inf to simulate a breakdown inside MatMult().
*/
PetscErrorCode MatMult_MyShell(Mat A, Vec x, Vec y) {
  MatShellCtx    *matshellctx;
  static PetscInt fail = 0; /* counts multiplies across all calls */

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &matshellctx));
  PetscCall(MatMult(matshellctx->Jmf, x, y));
  if (fail++ > 5) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
    if (rank == 0) PetscCall(VecSetInf(y)); /* only rank 0 corrupts the vector */
  }
  PetscFunctionReturn(0);
}

/*
   MatAssemblyEnd_MyShell - Forwards assembly-end to the wrapped MFFD
   Jacobian so the shell stays usable after each Jacobian rebuild.
*/
PetscErrorCode MatAssemblyEnd_MyShell(Mat A, MatAssemblyType tp) {
  MatShellCtx *matshellctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &matshellctx));
  PetscCall(MatAssemblyEnd(matshellctx->Jmf, tp));
  PetscFunctionReturn(0);
}

/*
   PCApply_MyShell - Identity preconditioner (y = x) that, starting with
   the 5th application (static counter), has rank 0 overwrite the result
   with Inf to simulate a breakdown inside PCApply().
*/
PetscErrorCode PCApply_MyShell(PC pc, Vec x, Vec y) {
  static PetscInt fail = 0; /* counts applications across all calls */

  PetscFunctionBegin;
  PetscCall(VecCopy(x, y));
  if (fail++ > 3) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
    if (rank == 0) PetscCall(VecSetInf(y)); /* only rank 0 corrupts the vector */
  }
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode SNESComputeJacobian_DMDA(SNES, Vec, Mat, Mat, void *);

/*
   SNESComputeJacobian_MyShell - Computes the true DMDA Jacobian, then
   zeros it on every call after the first so the subsequent PCSetUp()
   (e.g. an LU factorization of the zero matrix) fails.
*/
PetscErrorCode SNESComputeJacobian_MyShell(SNES snes, Vec X, Mat A, Mat B, void *ctx) {
  static PetscInt fail = 0; /* first call leaves the Jacobian intact */

  PetscFunctionBegin;
  PetscCall(SNESComputeJacobian_DMDA(snes, X, A, B, ctx));
  if (fail++ > 0) PetscCall(MatZeroEntries(A));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -snes_converged_reason -ksp_converged_reason

   test:
      suffix: 2
      args: -snes_converged_reason -ksp_converged_reason -error_in_matmult

   test:
      suffix: 3
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcapply

   test:
      suffix: 4
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup

   test:
      suffix: 5
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type bjacobi

   test:
      suffix: 5_fieldsplit
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type fieldsplit
      output_file: output/ex69_5.out

   test:
      suffix: 6
      args: -snes_converged_reason -ksp_converged_reason -error_in_domainmf -snes_mf -pc_type none

   test:
      suffix: 7
      args: -snes_converged_reason -ksp_converged_reason -error_in_domain

   test:
      suffix: 8
      args: -snes_converged_reason -ksp_converged_reason -error_in_domain -snes_mf -pc_type none

TEST*/