static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n";

/*T
   Concepts: SNES^parallel Bratu example
   Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
   Processors: n
T*/

/*

    The linear and nonlinear variants of each solver below should give almost identical results on this problem, since the operator is linear.

    Richardson
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12 -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info

    GMRES
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    CG
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ncg -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    Multigrid
      Linear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor
          -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12 -ksp_monitor_true_residual

        n levels:
          -da_refine n

      Nonlinear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor

        n levels:
          -da_refine n -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly

*/
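/*
   An illustrative command line (added for orientation; the MPI launcher,
   executable name, and process count are placeholders, while the options are
   taken from the lists above):

       mpiexec -n 4 ./ex -da_refine 3 -snes_monitor -snes_type ngmres
*/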
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that
   petscsnes.h automatically includes petscksp.h and other lower-level
   PETSc headers.
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormMatrix(DM,Mat);
extern PetscErrorCode MyComputeFunction(SNES,Vec,Vec,void*);
extern PetscErrorCode MyComputeJacobian(SNES,Vec,Mat,Mat,void*);
extern PetscErrorCode NonlinearGS(SNES,Vec);

int main(int argc,char **argv)
{
  SNES      snes;                          /* nonlinear solver */
  SNES      psnes;                         /* nonlinear Gauss-Seidel approximate solver */
  Vec       x,b;                           /* solution and right-hand-side vectors */
  PetscInt  its;                           /* iterations for convergence */
  DM        da;
  PetscBool use_ngs_as_npc = PETSC_FALSE;  /* use the nonlinear Gauss-Seidel approximate solver as nonlinear preconditioner */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-use_ngs_as_npc",&use_ngs_as_npc,NULL));

  if (use_ngs_as_npc) {
    PetscCall(SNESGetNPC(snes,&psnes));
    PetscCall(SNESSetType(psnes,SNESSHELL));
    PetscCall(SNESShellSetSolve(psnes,NonlinearGS));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,1.0));
  PetscCall(SNESSetDM(snes,da));
  if (use_ngs_as_npc) {
    PetscCall(SNESShellSetContext(psnes,da));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));
  PetscCall(DMCreateGlobalVector(da,&b));
  PetscCall(VecSet(b,1.0));

  PetscCall(SNESSetFunction(snes,NULL,MyComputeFunction,NULL));
  PetscCall(SNESSetJacobian(snes,NULL,NULL,MyComputeJacobian,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes,b,x));
  PetscCall(SNESGetIterationNumber(snes,&its));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode MyComputeFunction(SNES snes,Vec x,Vec F,void *ctx)
{
  Mat J;
  DM  dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&dm));
  PetscCall(DMGetApplicationContext(dm,&J));
  if (!J) {
    PetscCall(DMSetMatType(dm,MATAIJ));
    PetscCall(DMCreateMatrix(dm,&J));
    PetscCall(MatSetDM(J,NULL));
    PetscCall(FormMatrix(dm,J));
    PetscCall(DMSetApplicationContext(dm,J));
    PetscCall(DMSetApplicationContextDestroy(dm,(PetscErrorCode (*)(void**))MatDestroy));
  }
  PetscCall(MatMult(J,x,F));
  PetscFunctionReturn(0);
}

PetscErrorCode MyComputeJacobian(SNES snes,Vec x,Mat J,Mat Jp,void *ctx)
{
  DM dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes,&dm));
  PetscCall(FormMatrix(dm,Jp));
  PetscFunctionReturn(0);
}

PetscErrorCode FormMatrix(DM da,Mat jac)
{
  PetscInt      i,j,nrows = 0;
  MatStencil    col[5],row,*rows;
  PetscScalar   v[5],hx,hy,hxdhy,hydhx;
  DMDALocalInfo info;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetLocalInfo(da,&info));
  hx    = 1.0/(PetscReal)(info.mx-1);
  hy    = 1.0/(PetscReal)(info.my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  PetscCall(PetscMalloc1(info.ym*info.xm,&rows));
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
  */
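  /*
     For reference (this derivation is an addition, read off from the entries
     assembled below): each interior row is the standard 5-point finite-difference
     stencil for -Laplacian(u) on the unit square, scaled by hx*hy, i.e.

         hydhx*(2*u_ij - u_(i-1)j - u_(i+1)j) + hxdhy*(2*u_ij - u_i(j-1) - u_i(j+1))

     with hydhx = hy/hx and hxdhy = hx/hy, giving the diagonal entry
     2*(hydhx + hxdhy) and off-diagonal entries -hydhx (east/west neighbors)
     and -hxdhy (north/south neighbors).
  */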
  for (j=info.ys; j<info.ys+info.ym; j++) {
    for (i=info.xs; i<info.xs+info.xm; i++) {
      row.j = j; row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info.mx-1 || j == info.my-1) {
        v[0] = 2.0*(hydhx + hxdhy);
        PetscCall(MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES));
        rows[nrows].i   = i;
        rows[nrows++].j = j;
      } else {
        /* interior grid points */
        v[0] = -hxdhy;              col[0].j = j - 1; col[0].i = i;
        v[1] = -hydhx;              col[1].j = j;     col[1].i = i-1;
        v[2] = 2.0*(hydhx + hxdhy); col[2].j = row.j; col[2].i = row.i;
        v[3] = -hydhx;              col[3].j = j;     col[3].i = i+1;
        v[4] = -hxdhy;              col[4].j = j + 1; col[4].i = i;
        PetscCall(MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY));
  PetscCall(MatZeroRowsColumnsStencil(jac,nrows,rows,2.0*(hydhx + hxdhy),NULL,NULL));
  PetscCall(PetscFree(rows));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
/*
   Applies sweeps of nonlinear Gauss-Seidel on each process
*/
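/*
   Sketch of the update performed below (this explanation is an addition,
   derived from the code itself): at each interior point the local residual of
   the scaled 5-point stencil is

       F = hydhx*(2*u - u_W - u_E) + hxdhy*(2*u - u_S - u_N)

   and its derivative with respect to the center value is J = 2*(hydhx + hxdhy),
   so each sweep applies the pointwise Newton step u <- u - F/J, reusing the
   most recently updated neighbor values, i.e. a Gauss-Seidel sweep.
*/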
PetscErrorCode NonlinearGS(SNES snes,Vec X)
{
  PetscInt    i,j,Mx,My,xs,ys,xm,ym,its,l;
  PetscReal   hx,hy,hxdhy,hydhx;
  PetscScalar **x,F,J,u,uxx,uyy;
  DM          da;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(SNESGetTolerances(snes,NULL,NULL,NULL,&its,NULL));
  PetscCall(SNESShellGetContext(snes,&da));

  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx    = 1.0/(PetscReal)(Mx-1);
  hy    = 1.0/(PetscReal)(My-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  PetscCall(DMGetLocalVector(da,&localX));

  for (l=0; l<its; l++) {

    PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
    PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
    /*
       Get a pointer to vector data.
         - DMDAVecGetArray() returns a multidimensional array x[j][i]
           indexed with the global grid indices.
         - You MUST call DMDAVecRestoreArray() when you no longer need
           access to the array.
    */
    PetscCall(DMDAVecGetArray(da,localX,&x));

    /*
       Get local grid boundaries (for 2-dimensional DMDA):
         xs, ys - starting grid indices (no ghost points)
         xm, ym - widths of local grid (no ghost points)
    */
    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          u   = x[j][i];
          uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
          uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
          F   = uxx + uyy;
          J   = 2.0*(hydhx + hxdhy);
          u   = u - F/J;

          x[j][i] = u;
        }
      }
    }

    /*
       Restore vector
    */
    PetscCall(DMDAVecRestoreArray(da,localX,&x));
    PetscCall(DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X));
    PetscCall(DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X));
  }
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -snes_monitor_short -snes_type nrichardson
      requires: !single

   test:
      suffix: 2
      args: -snes_monitor_short -ksp_monitor_short -ksp_type richardson -pc_type none -ksp_richardson_self_scale
      requires: !single

   test:
      suffix: 3
      args: -snes_monitor_short -snes_type ngmres

   test:
      suffix: 4
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none

   test:
      suffix: 5
      args: -snes_monitor_short -snes_type ncg

   test:
      suffix: 6
      args: -snes_monitor_short -ksp_type cg -ksp_monitor_short -pc_type none

   test:
      suffix: 7
      args: -da_refine 2 -snes_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor_short -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor_short
      requires: !single

   test:
      suffix: 8
      args: -da_refine 2 -snes_monitor_short -snes_type fas -fas_levels_snes_monitor_short -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_rtol 1.e-5

   test:
      suffix: 9
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc

   test:
      suffix: 10
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc -snes_trdc_use_cauchy false

TEST*/