static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n";

/*
   The linear and nonlinear versions of these should give almost identical results on this problem.

   Richardson
     Nonlinear:
       -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor

     Linear:
       -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12 -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info

   GMRES
     Nonlinear:
       -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres

     Linear:
       -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

   CG
     Nonlinear:
       -snes_rtol 1.e-12 -snes_monitor -snes_type ncg -snes_linesearch_monitor

     Linear:
       -snes_rtol 1.e-12 -snes_monitor -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

   Multigrid
     Linear:
       1 level:
         -snes_rtol 1.e-12 -snes_monitor -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor
         -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12 -ksp_monitor_true_residual

       n levels:
         -da_refine n

     Nonlinear:
       1 level:
         -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor

       n levels:
         -da_refine n -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly

*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormMatrix(DM,Mat);
extern PetscErrorCode MyComputeFunction(SNES,Vec,Vec,void*);
extern PetscErrorCode MyComputeJacobian(SNES,Vec,Mat,Mat,void*);
extern PetscErrorCode NonlinearGS(SNES,Vec);

int main(int argc,char **argv)
{
  SNES      snes;                          /* nonlinear solver */
  SNES      psnes;                         /* nonlinear Gauss-Seidel approximate solver */
  Vec       x,b;                           /* solution and right-hand-side vectors */
  PetscInt  its;                           /* iterations for convergence */
  DM        da;
  PetscBool use_ngs_as_npc = PETSC_FALSE;  /* use the nonlinear Gauss-Seidel approximate solver */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-use_ngs_as_npc",&use_ngs_as_npc,NULL));

  if (use_ngs_as_npc) {
    PetscCall(SNESGetNPC(snes,&psnes));
    PetscCall(SNESSetType(psnes,SNESSHELL));
    PetscCall(SNESShellSetSolve(psnes,NonlinearGS));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,1.0));
  PetscCall(SNESSetDM(snes,da));
  if (use_ngs_as_npc) PetscCall(SNESShellSetContext(psnes,da));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA for the solution and the
     right-hand side
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));
  PetscCall(DMCreateGlobalVector(da,&b));
  PetscCall(VecSet(b,1.0));

  PetscCall(SNESSetFunction(snes,NULL,MyComputeFunction,NULL));
  PetscCall(SNESSetJacobian(snes,NULL,NULL,MyComputeJacobian,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes,b,x));
  PetscCall(SNESGetIterationNumber(snes,&its));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
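/*
   Because the PDE here is linear, the SNES residual is simply F(x) = A x, where A is the
   discrete Laplacian assembled by FormMatrix() below, and SNESSolve(snes,b,x) solves
   A x = b.  MyComputeFunction() builds A lazily on first use and caches it as the DM's
   application context, so repeated residual evaluations reuse the same matrix.

   For comparison, a minimal sketch (not part of this example) of solving the same linear
   system directly with KSP, assuming the DMDA 'da', the vectors 'b' and 'x', and the
   FormMatrix() routine from this file:

     Mat A;
     KSP ksp;
     PetscCall(DMCreateMatrix(da,&A));                  // matrix with the DMDA's sparsity pattern
     PetscCall(FormMatrix(da,A));                       // assemble the discrete Laplacian
     PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
     PetscCall(KSPSetOperators(ksp,A,A));               // use A as both operator and preconditioner matrix
     PetscCall(KSPSetFromOptions(ksp));
     PetscCall(KSPSolve(ksp,b,x));
     PetscCall(KSPDestroy(&ksp));
     PetscCall(MatDestroy(&A));
*/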
/* ------------------------------------------------------------------- */
PetscErrorCode MyComputeFunction(SNES snes,Vec x,Vec F,void *ctx)
{
  Mat J;
  DM  dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&dm));
  PetscCall(DMGetApplicationContext(dm,&J));
  if (!J) {
    PetscCall(DMSetMatType(dm,MATAIJ));
    PetscCall(DMCreateMatrix(dm,&J));
    PetscCall(MatSetDM(J,NULL));
    PetscCall(FormMatrix(dm,J));
    PetscCall(DMSetApplicationContext(dm,J));
    PetscCall(DMSetApplicationContextDestroy(dm,(PetscErrorCode (*)(void**))MatDestroy));
  }
  PetscCall(MatMult(J,x,F));
  PetscFunctionReturn(0);
}

PetscErrorCode MyComputeJacobian(SNES snes,Vec x,Mat J,Mat Jp,void *ctx)
{
  DM dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&dm));
  PetscCall(FormMatrix(dm,Jp));
  PetscFunctionReturn(0);
}

PetscErrorCode FormMatrix(DM da,Mat jac)
{
  PetscInt      i,j,nrows = 0;
  MatStencil    col[5],row,*rows;
  PetscScalar   v[5],hx,hy,hxdhy,hydhx;
  DMDALocalInfo info;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetLocalInfo(da,&info));
  hx    = 1.0/(PetscReal)(info.mx-1);
  hy    = 1.0/(PetscReal)(info.my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  PetscCall(PetscMalloc1(info.ym*info.xm,&rows));
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesStencil() or
        MatSetValues().
  */
  for (j=info.ys; j<info.ys+info.ym; j++) {
    for (i=info.xs; i<info.xs+info.xm; i++) {
      row.j = j; row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info.mx-1 || j == info.my-1) {
        v[0] = 2.0*(hydhx + hxdhy);
        PetscCall(MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES));
        rows[nrows].i   = i;
        rows[nrows++].j = j;
      } else {
        /* interior grid points */
        v[0] = -hxdhy;              col[0].j = j - 1; col[0].i = i;
        v[1] = -hydhx;              col[1].j = j;     col[1].i = i-1;
        v[2] = 2.0*(hydhx + hxdhy); col[2].j = row.j; col[2].i = row.i;
        v[3] = -hydhx;              col[3].j = j;     col[3].i = i+1;
        v[4] = -hxdhy;              col[4].j = j + 1; col[4].i = i;
        PetscCall(MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY));
  PetscCall(MatZeroRowsColumnsStencil(jac,nrows,rows,2.0*(hydhx + hxdhy),NULL,NULL));
  PetscCall(PetscFree(rows));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  PetscFunctionReturn(0);
}
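/*
   For reference, the stencil weights set in FormMatrix() come from the standard 5-point
   finite-difference discretization of -Laplacian u on a uniform grid, scaled by the cell
   area hx*hy:

      (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j)) + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))

   hence the diagonal entry 2*(hy/hx + hx/hy), the off-diagonal -hy/hx for the east/west
   neighbors, and -hx/hy for the north/south neighbors.  The same weights reappear
   pointwise in NonlinearGS() below.
*/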
/* ------------------------------------------------------------------- */
/*
   Applies sweeps of nonlinear Gauss-Seidel on each process
*/
PetscErrorCode NonlinearGS(SNES snes,Vec X)
{
  PetscInt    i,j,Mx,My,xs,ys,xm,ym,its,l;
  PetscReal   hx,hy,hxdhy,hydhx;
  PetscScalar **x,F,J,u,uxx,uyy;
  DM          da;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(SNESGetTolerances(snes,NULL,NULL,NULL,&its,NULL));
  PetscCall(SNESShellGetContext(snes,&da));

  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx    = 1.0/(PetscReal)(Mx-1);
  hy    = 1.0/(PetscReal)(My-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  PetscCall(DMGetLocalVector(da,&localX));

  for (l=0; l<its; l++) {
    PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
    PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
    /*
       Get a pointer to vector data.
        - DMDAVecGetArray() returns an array indexed with the global
          grid indices of the DMDA.
        - You MUST call DMDAVecRestoreArray() when you no longer need
          access to the array.
    */
    PetscCall(DMDAVecGetArray(da,localX,&x));

    /*
       Get local grid boundaries (for 2-dimensional DMDA):
         xs, ys   - starting grid indices (no ghost points)
         xm, ym   - widths of local grid (no ghost points)
    */
    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          u   = x[j][i];
          uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
          uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
          F   = uxx + uyy;
          J   = 2.0*(hydhx + hxdhy);
          u   = u - F/J;

          x[j][i] = u;
        }
      }
    }

    /*
       Restore vector
    */
    PetscCall(DMDAVecRestoreArray(da,localX,&x));
    PetscCall(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
    PetscCall(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
  }
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}
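/*
   Each interior point in NonlinearGS() above is updated with one pointwise Newton step
   on its own residual,

      u <- u - F(u)/J,  where F(u) = (hy/hx)*(2u - u_W - u_E) + (hx/hy)*(2u - u_S - u_N)
                        and   J    = dF/du = 2*(hy/hx + hx/hy),

   which for this linear problem is exactly Gauss-Seidel with the matrix from FormMatrix().
   One (untested) way to exercise it as a nonlinear preconditioner for another SNES type
   might be, for example,

      -use_ngs_as_npc -snes_type ngmres -snes_monitor

   though only the option combinations in the TEST block below are verified.
*/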
/*TEST

   test:
      args: -snes_monitor_short -snes_type nrichardson
      requires: !single

   test:
      suffix: 2
      args: -snes_monitor_short -ksp_monitor_short -ksp_type richardson -pc_type none -ksp_richardson_self_scale
      requires: !single

   test:
      suffix: 3
      args: -snes_monitor_short -snes_type ngmres

   test:
      suffix: 4
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none

   test:
      suffix: 5
      args: -snes_monitor_short -snes_type ncg

   test:
      suffix: 6
      args: -snes_monitor_short -ksp_type cg -ksp_monitor_short -pc_type none

   test:
      suffix: 7
      args: -da_refine 2 -snes_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor_short -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor_short
      requires: !single

   test:
      suffix: 8
      args: -da_refine 2 -snes_monitor_short -snes_type fas -fas_levels_snes_monitor_short -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_rtol 1.e-5

   test:
      suffix: 9
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc

   test:
      suffix: 10
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc -snes_trdc_use_cauchy false

TEST*/