static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n";

/*

    The linear and nonlinear versions of these solvers should give almost identical results on this problem

    Richardson
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12 -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info

    GMRES
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    CG
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ncg -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    Multigrid
      Linear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor
          -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12 -ksp_monitor_true_residual

        n levels:
          -da_refine n

      Nonlinear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor

        n levels:
          -da_refine n -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly

*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers. Note that this
   header automatically pulls in the lower-level PETSc headers it depends on
   (petscsys.h, petscvec.h, petscmat.h, petscksp.h, petscpc.h).
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormMatrix(DM, Mat);
extern PetscErrorCode MyComputeFunction(SNES, Vec, Vec, void *);
extern PetscErrorCode MyComputeJacobian(SNES, Vec, Mat, Mat, void *);
extern PetscErrorCode NonlinearGS(SNES, Vec);

int main(int argc, char **argv)
{
  SNES      snes;                         /* nonlinear solver */
  SNES      psnes;                        /* nonlinear Gauss-Seidel approximate solver */
  Vec       x, b;                         /* solution and right-hand-side vectors */
  PetscInt  its;                          /* iterations for convergence */
  DM        da;
  PetscBool use_ngs_as_npc = PETSC_FALSE; /* use the nonlinear Gauss-Seidel approximate solver */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_ngs_as_npc", &use_ngs_as_npc, NULL));

  if (use_ngs_as_npc) {
    PetscCall(SNESGetNPC(snes, &psnes));
    PetscCall(SNESSetType(psnes, SNESSHELL));
    PetscCall(SNESShellSetSolve(psnes, NonlinearGS));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  PetscCall(SNESSetDM(snes, da));
  if (use_ngs_as_npc) PetscCall(SNESShellSetContext(psnes, da));
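  /*
     When -use_ngs_as_npc is given, the SNESSHELL configured above applies
     NonlinearGS() as a nonlinear preconditioner for the outer SNES, and the
     DMDA is attached as the shell's context so the sweeps can access the
     grid. A sketch of an invocation exercising this path (the executable
     name ex35 is an assumption):

         ./ex35 -use_ngs_as_npc -snes_type ngmres -snes_monitor
  */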
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; x holds the solution and b the
     right-hand side
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(DMCreateGlobalVector(da, &b));
  PetscCall(VecSet(b, 1.0));

  PetscCall(SNESSetFunction(snes, NULL, MyComputeFunction, NULL));
  PetscCall(SNESSetJacobian(snes, NULL, NULL, MyComputeJacobian, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes, b, x));
  PetscCall(SNESGetIterationNumber(snes, &its));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode MyComputeFunction(SNES snes, Vec x, Vec F, void *ctx)
{
  Mat J;
  DM  dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes, &dm));
  PetscCall(DMGetApplicationContext(dm, &J));
  if (!J) {
    /* Lazily create the (constant) operator once and stash it on the DM so
       repeated function evaluations reuse it */
    PetscCall(DMSetMatType(dm, MATAIJ));
    PetscCall(DMCreateMatrix(dm, &J));
    PetscCall(MatSetDM(J, NULL));
    PetscCall(FormMatrix(dm, J));
    PetscCall(DMSetApplicationContext(dm, J));
    PetscCall(DMSetApplicationContextDestroy(dm, (PetscErrorCode (*)(void **))MatDestroy));
  }
  PetscCall(MatMult(J, x, F));
  PetscFunctionReturn(0);
}

PetscErrorCode MyComputeJacobian(SNES snes, Vec x, Mat J, Mat Jp, void *ctx)
{
  DM dm;

  PetscFunctionBeginUser;
  PetscCall(SNESGetDM(snes, &dm));
  PetscCall(FormMatrix(dm, Jp));
  PetscFunctionReturn(0);
}

PetscErrorCode FormMatrix(DM da, Mat jac)
{
  PetscInt      i, j, nrows = 0;
  MatStencil    col[5], row, *rows;
  PetscScalar   v[5], hx, hy, hxdhy, hydhx;
  DMDALocalInfo info;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetLocalInfo(da, &info));
  hx    = 1.0 / (PetscReal)(info.mx - 1);
  hy    = 1.0 / (PetscReal)(info.my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;

  PetscCall(PetscMalloc1(info.ym * info.xm, &rows));
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesStencil()
        or MatSetValues().
  */
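  /*
     The loop below assembles the classic five-point finite-difference
     stencil for -Laplacian, scaled by hx*hy:

                      -hx/hy
          -hy/hx  2(hy/hx + hx/hy)  -hy/hx
                      -hx/hy

     Boundary rows receive only the diagonal entry here and are recorded in
     rows[] so that MatZeroRowsColumnsStencil() can enforce the Dirichlet
     conditions symmetrically after assembly.
  */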
  for (j = info.ys; j < info.ys + info.ym; j++) {
    for (i = info.xs; i < info.xs + info.xm; i++) {
      row.j = j;
      row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info.mx - 1 || j == info.my - 1) {
        v[0] = 2.0 * (hydhx + hxdhy);
        PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
        rows[nrows].i   = i;
        rows[nrows++].j = j;
      } else {
        /* interior grid points */
        v[0] = -hxdhy;                col[0].j = j - 1; col[0].i = i;
        v[1] = -hydhx;                col[1].j = j;     col[1].i = i - 1;
        v[2] = 2.0 * (hydhx + hxdhy); col[2].j = row.j; col[2].i = row.i;
        v[3] = -hydhx;                col[3].j = j;     col[3].i = i + 1;
        v[4] = -hxdhy;                col[4].j = j + 1; col[4].i = i;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  /* Enforce the Dirichlet boundary rows (and columns, preserving symmetry),
     placing the stencil-scaled value on the diagonal */
  PetscCall(MatZeroRowsColumnsStencil(jac, nrows, rows, 2.0 * (hydhx + hxdhy), NULL, NULL));
  PetscCall(PetscFree(rows));
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
/*
      Applies sweeps of nonlinear Gauss-Seidel on each process
*/
PetscErrorCode NonlinearGS(SNES snes, Vec X)
{
  PetscInt    i, j, Mx, My, xs, ys, xm, ym, its, l;
  PetscReal   hx, hy, hxdhy, hydhx;
  PetscScalar **x, F, J, u, uxx, uyy;
  DM          da;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(SNESGetTolerances(snes, NULL, NULL, NULL, &its, NULL));
  PetscCall(SNESShellGetContext(snes, &da));

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx    = 1.0 / (PetscReal)(Mx - 1);
  hy    = 1.0 / (PetscReal)(My - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;

  PetscCall(DMGetLocalVector(da, &localX));

  for (l = 0; l < its; l++) {
    PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
    PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
    /*
       Get a pointer to vector data.
         - For default PETSc vectors, VecGetArray() returns a pointer to
           the data array. Otherwise, the routine is implementation dependent.
         - You MUST call VecRestoreArray() when you no longer need access to
           the array.
    */
    PetscCall(DMDAVecGetArray(da, localX, &x));

    /*
       Get local grid boundaries (for 2-dimensional DMDA):
         xs, ys - starting grid indices (no ghost points)
         xm, ym - widths of local grid (no ghost points)
    */
    PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
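    /*
       Each interior point takes one Newton step on its pointwise residual
       F(u) = uxx + uyy, whose diagonal Jacobian is J = 2*(hydhx + hxdhy).
       Because the update is written back into x immediately, later points in
       the sweep see the new values of earlier neighbors: Gauss-Seidel rather
       than Jacobi.
    */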
    for (j = ys; j < ys + ym; j++) {
      for (i = xs; i < xs + xm; i++) {
        if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          u   = x[j][i];
          uxx = (2.0 * u - x[j][i - 1] - x[j][i + 1]) * hydhx;
          uyy = (2.0 * u - x[j - 1][i] - x[j + 1][i]) * hxdhy;
          F   = uxx + uyy;
          J   = 2.0 * (hydhx + hxdhy);
          u   = u - F / J;

          x[j][i] = u;
        }
      }
    }

    /*
       Restore vector
    */
    PetscCall(DMDAVecRestoreArray(da, localX, &x));
    PetscCall(DMLocalToGlobalBegin(da, localX, INSERT_VALUES, X));
    PetscCall(DMLocalToGlobalEnd(da, localX, INSERT_VALUES, X));
  }
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -snes_monitor_short -snes_type nrichardson
      requires: !single

   test:
      suffix: 2
      args: -snes_monitor_short -ksp_monitor_short -ksp_type richardson -pc_type none -ksp_richardson_self_scale
      requires: !single

   test:
      suffix: 3
      args: -snes_monitor_short -snes_type ngmres

   test:
      suffix: 4
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none

   test:
      suffix: 5
      args: -snes_monitor_short -snes_type ncg

   test:
      suffix: 6
      args: -snes_monitor_short -ksp_type cg -ksp_monitor_short -pc_type none

   test:
      suffix: 7
      args: -da_refine 2 -snes_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor_short -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor_short
      requires: !single

   test:
      suffix: 8
      args: -da_refine 2 -snes_monitor_short -snes_type fas -fas_levels_snes_monitor_short -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_rtol 1.e-5

   test:
      suffix: 9
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc

   test:
      suffix: 10
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none -snes_type newtontrdc -snes_trdc_use_cauchy false

TEST*/
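/*
   Note: the TEST block above is consumed by the PETSc test harness. Each
   "test:" stanza runs this example with the listed args and compares the
   output against a stored reference file; "requires: !single" skips a run
   on single-precision builds, where the residual histories printed by the
   monitors would not match the reference output.
*/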