static char help[] = "Tests recovery from domain errors in MatMult() and PCApply()\n\n";

/*
   See src/ksp/ksp/tutorials/ex19.c from which this was copied

   This is a failure-injection test: depending on command-line options it
   deliberately produces Inf values in MatMult()/PCApply(), zeroes the
   Jacobian after setup, or flags a SNES function-domain error, and checks
   that the solvers detect the failure and recover/report cleanly.
*/

#include <petscsnes.h>
#include <petscdm.h>
#include <petscdmda.h>

/*
   User-defined routines and data structures
*/

/* Unknowns stored at each grid point: x/y velocity, vorticity, temperature */
typedef struct {
  PetscScalar u,v,omega,temp;
} Field;

PetscErrorCode FormFunctionLocal(DMDALocalInfo*,Field**,Field**,void*);

typedef struct {
  PetscReal lidvelocity,prandtl,grashof;  /* physical parameters */
  PetscBool draw_contours;                /* flag - 1 indicates drawing contours */
  PetscBool errorindomain;                /* inject a domain error late in the residual evaluation */
  PetscBool errorindomainmf;              /* inject a domain error early (for the matrix-free run) */
  SNES      snes;                         /* kept here so FormFunctionLocal() can flag the domain error */
} AppCtx;

/* Context for the shell Jacobian: wraps the real matrix-free (MFFD) matrix */
typedef struct {
  Mat Jmf;
} MatShellCtx;

extern PetscErrorCode FormInitialGuess(AppCtx*,DM,Vec);
extern PetscErrorCode MatMult_MyShell(Mat,Vec,Vec);
extern PetscErrorCode MatAssemblyEnd_MyShell(Mat,MatAssemblyType);
extern PetscErrorCode PCApply_MyShell(PC,Vec,Vec);
extern PetscErrorCode SNESComputeJacobian_MyShell(SNES,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx      user;  /* user-defined work context */
  PetscInt    mx,my;
  MPI_Comm    comm;
  DM          da;
  Vec         x;
  Mat         J = NULL,Jmf = NULL;
  MatShellCtx matshellctx;
  PetscInt    mlocal,nlocal;
  PC          pc;
  KSP         ksp;
  PetscBool   errorinmatmult = PETSC_FALSE,errorinpcapply = PETSC_FALSE,errorinpcsetup = PETSC_FALSE;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  /* Select which failure mode (if any) this run injects */
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-error_in_matmult",&errorinmatmult,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-error_in_pcapply",&errorinpcapply,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-error_in_pcsetup",&errorinpcsetup,NULL));
  user.errorindomain = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-error_in_domain",&user.errorindomain,NULL));
  user.errorindomainmf = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-error_in_domainmf",&user.errorindomainmf,NULL));

  comm = PETSC_COMM_WORLD;
  PetscCall(SNESCreate(comm,&user.snes));

  /*
      Create distributed array object to manage parallel grid and vectors
      for principal unknowns (x) and governing residuals (f)
  */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,4,1,0,0,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(SNESSetDM(user.snes,da));

  PetscCall(DMDAGetInfo(da,0,&mx,&my,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /*
     Problem parameters (velocity of lid, prandtl, and grashof numbers)
  */
  user.lidvelocity = 1.0/(mx*my);
  user.prandtl     = 1.0;
  user.grashof     = 1.0;

  PetscCall(PetscOptionsGetReal(NULL,NULL,"-lidvelocity",&user.lidvelocity,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-prandtl",&user.prandtl,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-grashof",&user.grashof,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-contours",&user.draw_contours));

  PetscCall(DMDASetFieldName(da,0,"x_velocity"));
  PetscCall(DMDASetFieldName(da,1,"y_velocity"));
  PetscCall(DMDASetFieldName(da,2,"Omega"));
  PetscCall(DMDASetFieldName(da,3,"temperature"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create user context, set problem data, create vector data structures.
     Also, compute the initial guess.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetApplicationContext(da,&user));
  PetscCall(DMDASNESSetFunctionLocal(da,INSERT_VALUES,(PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))FormFunctionLocal,&user));

  if (errorinmatmult) {
    /* Wrap the matrix-free Jacobian in a shell matrix whose MatMult()
       starts producing Inf after a few applications (see MatMult_MyShell) */
    PetscCall(MatCreateSNESMF(user.snes,&Jmf));
    PetscCall(MatSetFromOptions(Jmf));
    PetscCall(MatGetLocalSize(Jmf,&mlocal,&nlocal));
    matshellctx.Jmf = Jmf;
    PetscCall(MatCreateShell(PetscObjectComm((PetscObject)Jmf),mlocal,nlocal,PETSC_DECIDE,PETSC_DECIDE,&matshellctx,&J));
    PetscCall(MatShellSetOperation(J,MATOP_MULT,(void (*)(void))MatMult_MyShell));
    PetscCall(MatShellSetOperation(J,MATOP_ASSEMBLY_END,(void (*)(void))MatAssemblyEnd_MyShell));
    PetscCall(SNESSetJacobian(user.snes,J,J,MatMFFDComputeJacobian,NULL));
  }

  PetscCall(SNESSetFromOptions(user.snes));
  PetscCall(PetscPrintf(comm,"lid velocity = %g, prandtl # = %g, grashof # = %g\n",(double)user.lidvelocity,(double)user.prandtl,(double)user.grashof));

  if (errorinpcapply) {
    /* Replace the preconditioner with a shell whose apply eventually
       produces Inf (see PCApply_MyShell) */
    PetscCall(SNESGetKSP(user.snes,&ksp));
    PetscCall(KSPGetPC(ksp,&pc));
    PetscCall(PCSetType(pc,PCSHELL));
    PetscCall(PCShellSetApply(pc,PCApply_MyShell));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));
  PetscCall(FormInitialGuess(&user,da,x));

  if (errorinpcsetup) {
    /* SNESSetUp() must run first so the DMDA-provided Jacobian routine is
       installed; we then override it with one that zeroes the matrix after
       the first computation (see SNESComputeJacobian_MyShell) */
    PetscCall(SNESSetUp(user.snes));
    PetscCall(SNESSetJacobian(user.snes,NULL,NULL,SNESComputeJacobian_MyShell,NULL));
  }
  PetscCall(SNESSolve(user.snes,NULL,x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(MatDestroy(&J));
  PetscCall(MatDestroy(&Jmf));
  PetscCall(VecDestroy(&x));
  PetscCall(DMDestroy(&da));
  PetscCall(SNESDestroy(&user.snes));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */

/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   user - user-defined application context
   X - vector

   Output Parameter:
   X - vector
*/
PetscErrorCode FormInitialGuess(AppCtx *user,DM da,Vec X)
{
  PetscInt  i,j,mx,xs,ys,xm,ym;
  PetscReal grashof,dx;
  Field     **x;

  PetscFunctionBeginUser;
  grashof = user->grashof;

  PetscCall(DMDAGetInfo(da,0,&mx,0,0,0,0,0,0,0,0,0,0,0));
  dx = 1.0/(mx-1);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys   - starting grid indices (no ghost points)
       xm, ym   - widths of local grid (no ghost points)
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  PetscCall(DMDAVecGetArray(da,X,&x));

  /*
     Compute initial guess over the locally owned part of the grid
     Initial condition is motionless fluid and equilibrium temperature
  */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      x[j][i].u     = 0.0;
      x[j][i].v     = 0.0;
      x[j][i].omega = 0.0;
      /* linear temperature profile only when buoyancy (grashof > 0) is active */
      x[j][i].temp  = (grashof>0)*i*dx;
    }
  }

  /*
     Restore vector
  */
  PetscCall(DMDAVecRestoreArray(da,X,&x));
  PetscFunctionReturn(0);
}

/*
   FormFunctionLocal - Evaluates the driven-cavity residual on the local patch.
   When -error_in_domain/-error_in_domainmf is set, after a fixed number of
   evaluations rank 0 flags a function-domain error so the SNES failure
   handling can be exercised.
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,Field **x,Field **f,void *ptr)
{
  AppCtx          *user = (AppCtx*)ptr;
  PetscInt        xints,xinte,yints,yinte,i,j;
  PetscReal       hx,hy,dhx,dhy,hxdhy,hydhx;
  PetscReal       grashof,prandtl,lid;
  PetscScalar     u,uxx,uyy,vx,vy,avx,avy,vxp,vxm,vyp,vym;
  static PetscInt fail = 0;  /* evaluation counter, shared across all calls in this process */

  PetscFunctionBeginUser;
  /* NOTE(review): fail++ appears in BOTH short-circuit branches, so the
     counter usually advances by two per call; the thresholds (7, 36) are
     tuned to the recorded test output -- do not "simplify" this expression. */
  if ((fail++ > 7 && user->errorindomainmf) || (fail++ > 36 && user->errorindomain)) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)user->snes),&rank));
    if (rank == 0) {
      /* Only rank 0 flags the error: the test checks collective detection */
      PetscCall(SNESSetFunctionDomainError(user->snes));
    }
  }
  grashof = user->grashof;
  prandtl = user->prandtl;
  lid     = user->lidvelocity;

  /*
     Define mesh intervals ratios for uniform grid.

     Note: FD formulae below are normalized by multiplying through by
     local volume element (i.e. hx*hy) to obtain coefficients O(1) in two dimensions.

  */
  dhx   = (PetscReal)(info->mx-1);  dhy = (PetscReal)(info->my-1);
  hx    = 1.0/dhx;                  hy  = 1.0/dhy;
  hxdhy = hx*dhy;                   hydhx = hy*dhx;

  xints = info->xs; xinte = info->xs+info->xm; yints = info->ys; yinte = info->ys+info->ym;

  /* Test whether we are on the bottom edge of the global array */
  if (yints == 0) {
    j     = 0;
    yints = yints + 1;
    /* bottom edge */
    for (i=info->xs; i<info->xs+info->xm; i++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j+1][i].u - x[j][i].u)*dhy;
      f[j][i].temp  = x[j][i].temp-x[j+1][i].temp;
    }
  }

  /* Test whether we are on the top edge of the global array */
  if (yinte == info->my) {
    j     = info->my - 1;
    yinte = yinte - 1;
    /* top edge */
    for (i=info->xs; i<info->xs+info->xm; i++) {
      f[j][i].u     = x[j][i].u - lid;  /* moving lid drives the cavity */
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j][i].u - x[j-1][i].u)*dhy;
      f[j][i].temp  = x[j][i].temp-x[j-1][i].temp;
    }
  }

  /* Test whether we are on the left edge of the global array */
  if (xints == 0) {
    i     = 0;
    xints = xints + 1;
    /* left edge */
    for (j=info->ys; j<info->ys+info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i+1].v - x[j][i].v)*dhx;
      f[j][i].temp  = x[j][i].temp;
    }
  }

  /* Test whether we are on the right edge of the global array */
  if (xinte == info->mx) {
    i     = info->mx - 1;
    xinte = xinte - 1;
    /* right edge */
    for (j=info->ys; j<info->ys+info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i].v - x[j][i-1].v)*dhx;
      f[j][i].temp  = x[j][i].temp - (PetscReal)(grashof>0);
    }
  }

  /* Compute over the interior points */
  for (j=yints; j<yinte; j++) {
    for (i=xints; i<xinte; i++) {

      /*
        convective coefficients for upwinding
      */
      vx  = x[j][i].u; avx = PetscAbsScalar(vx);
      vxp = .5*(vx+avx); vxm = .5*(vx-avx);
      vy  = x[j][i].v; avy = PetscAbsScalar(vy);
      vyp = .5*(vy+avy); vym = .5*(vy-avy);

      /* U velocity */
      u         = x[j][i].u;
      uxx       = (2.0*u - x[j][i-1].u - x[j][i+1].u)*hydhx;
      uyy       = (2.0*u - x[j-1][i].u - x[j+1][i].u)*hxdhy;
      f[j][i].u = uxx + uyy - .5*(x[j+1][i].omega-x[j-1][i].omega)*hx;

      /* V velocity */
      u         = x[j][i].v;
      uxx       = (2.0*u - x[j][i-1].v - x[j][i+1].v)*hydhx;
      uyy       = (2.0*u - x[j-1][i].v - x[j+1][i].v)*hxdhy;
      f[j][i].v = uxx + uyy + .5*(x[j][i+1].omega-x[j][i-1].omega)*hy;

      /* Omega */
      u             = x[j][i].omega;
      uxx           = (2.0*u - x[j][i-1].omega - x[j][i+1].omega)*hydhx;
      uyy           = (2.0*u - x[j-1][i].omega - x[j+1][i].omega)*hxdhy;
      f[j][i].omega = uxx + uyy + (vxp*(u - x[j][i-1].omega) + vxm*(x[j][i+1].omega - u))*hy +
                      (vyp*(u - x[j-1][i].omega) + vym*(x[j+1][i].omega - u))*hx -
                      .5*grashof*(x[j][i+1].temp - x[j][i-1].temp)*hy;

      /* Temperature */
      u            = x[j][i].temp;
      uxx          = (2.0*u - x[j][i-1].temp - x[j][i+1].temp)*hydhx;
      uyy          = (2.0*u - x[j-1][i].temp - x[j+1][i].temp)*hxdhy;
      f[j][i].temp = uxx + uyy + prandtl*((vxp*(u - x[j][i-1].temp) + vxm*(x[j][i+1].temp - u))*hy +
                                          (vyp*(u - x[j-1][i].temp) + vym*(x[j+1][i].temp - u))*hx);
    }
  }

  /*
     Flop count (multiply-adds are counted as 2 operations)
  */
  PetscCall(PetscLogFlops(84.0*info->ym*info->xm));
  PetscFunctionReturn(0);
}

/*
   MatMult_MyShell - Forwards to the wrapped MFFD MatMult(), but after the
   first few applications rank 0 overwrites the result with Inf so the KSP
   breakdown detection gets exercised.
*/
PetscErrorCode MatMult_MyShell(Mat A,Vec x,Vec y)
{
  MatShellCtx     *matshellctx;
  static PetscInt fail = 0;  /* application counter; failure starts after 6 calls */

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A,&matshellctx));
  PetscCall(MatMult(matshellctx->Jmf,x,y));
  if (fail++ > 5) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank));
    if (rank == 0) PetscCall(VecSetInf(y));
  }
  PetscFunctionReturn(0);
}

/*
   MatAssemblyEnd_MyShell - Delegates assembly to the wrapped MFFD matrix
   (the shell itself has no storage to assemble).
*/
PetscErrorCode MatAssemblyEnd_MyShell(Mat A,MatAssemblyType tp)
{
  MatShellCtx *matshellctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A,&matshellctx));
  PetscCall(MatAssemblyEnd(matshellctx->Jmf,tp));
  PetscFunctionReturn(0);
}

/*
   PCApply_MyShell - Identity preconditioner that, after a few applications,
   has rank 0 overwrite the result with Inf to trigger PC failure handling.
*/
PetscErrorCode PCApply_MyShell(PC pc,Vec x,Vec y)
{
  static PetscInt fail = 0;  /* application counter; failure starts after 4 calls */

  PetscFunctionBegin;
  PetscCall(VecCopy(x,y));
  if (fail++ > 3) {
    PetscMPIInt rank;
    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank));
    if (rank == 0) PetscCall(VecSetInf(y));
  }
  PetscFunctionReturn(0);
}

/* Internal PETSc routine installed by SNESSetUp() for DMDA problems */
PETSC_EXTERN PetscErrorCode SNESComputeJacobian_DMDA(SNES,Vec,Mat,Mat,void*);

/*
   SNESComputeJacobian_MyShell - Computes the true Jacobian on the first call,
   then zeroes it on every subsequent call so the PC setup fails (singular
   operator), exercising -error_in_pcsetup recovery.
*/
PetscErrorCode SNESComputeJacobian_MyShell(SNES snes,Vec X,Mat A,Mat B,void *ctx)
{
  static PetscInt fail = 0;  /* only the first computation is left intact */

  PetscFunctionBegin;
  PetscCall(SNESComputeJacobian_DMDA(snes,X,A,B,ctx));
  if (fail++ > 0) {
    PetscCall(MatZeroEntries(A));
  }
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -snes_converged_reason -ksp_converged_reason

   test:
      suffix: 2
      args: -snes_converged_reason -ksp_converged_reason -error_in_matmult

   test:
      suffix: 3
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcapply

   test:
      suffix: 4
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup

   test:
      suffix: 5
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type bjacobi

   test:
      suffix: 5_fieldsplit
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type fieldsplit
      output_file: output/ex69_5.out

   test:
      suffix: 6
      args: -snes_converged_reason -ksp_converged_reason -error_in_domainmf -snes_mf -pc_type none

   test:
      suffix: 7
      args: -snes_converged_reason -ksp_converged_reason -error_in_domain

   test:
      suffix: 8
      args: -snes_converged_reason -ksp_converged_reason -error_in_domain -snes_mf -pc_type none

TEST*/