static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n"; /*T Concepts: SNES^parallel Bratu example Concepts: DMDA^using distributed arrays; Concepts: IS coloring types; Processors: n T*/ /* The linear and nonlinear versions of these should give almost identical results on this problem Richardson Nonlinear: -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor Linear: -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12 -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info GMRES Nonlinear: -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres Linear: -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none CG Nonlinear: -snes_rtol 1.e-12 -snes_monitor -snes_type ncg -snes_linesearch_monitor Linear: -snes_rtol 1.e-12 -snes_monitor -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none Multigrid Linear: 1 level: -snes_rtol 1.e-12 -snes_monitor -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12 -ksp_monitor_true_residual n levels: -da_refine n Nonlinear: 1 level: -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor n levels: -da_refine n -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly */ /* Include "petscdmda.h" so that we can use distributed arrays (DMDAs). Include "petscsnes.h" so that we can use SNES solvers.
Note that this */ #include #include #include /* User-defined routines */ extern PetscErrorCode FormMatrix(DM,Mat); extern PetscErrorCode MyComputeFunction(SNES,Vec,Vec,void*); extern PetscErrorCode MyComputeJacobian(SNES,Vec,Mat,Mat,void*); extern PetscErrorCode NonlinearGS(SNES,Vec); int main(int argc,char **argv) { SNES snes; /* nonlinear solver */ SNES psnes; /* nonlinear Gauss-Seidel approximate solver */ Vec x,b; /* solution vector */ PetscInt its; /* iterations for convergence */ PetscErrorCode ierr; DM da; PetscBool use_ngs_as_npc = PETSC_FALSE; /* use the nonlinear Gauss-Seidel approximate solver */ /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Initialize program - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr; /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Create nonlinear solver context - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = SNESCreate(PETSC_COMM_WORLD,&snes);CHKERRQ(ierr); ierr = PetscOptionsGetBool(NULL,NULL,"-use_ngs_as_npc",&use_ngs_as_npc,0);CHKERRQ(ierr); if (use_ngs_as_npc) { ierr = SNESGetNPC(snes,&psnes);CHKERRQ(ierr); ierr = SNESSetType(psnes,SNESSHELL);CHKERRQ(ierr); ierr = SNESShellSetSolve(psnes,NonlinearGS);CHKERRQ(ierr); } /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Create distributed array (DMDA) to manage parallel grid and vectors - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);CHKERRQ(ierr); ierr = DMSetFromOptions(da);CHKERRQ(ierr); ierr = DMSetUp(da);CHKERRQ(ierr); ierr = DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);CHKERRQ(ierr); ierr = SNESSetDM(snes,da);CHKERRQ(ierr); if (use_ngs_as_npc) { ierr = SNESShellSetContext(psnes,da);CHKERRQ(ierr); } /* - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Extract global vectors from DMDA; then duplicate for remaining vectors that are the same types - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr); ierr = DMCreateGlobalVector(da,&b);CHKERRQ(ierr); ierr = VecSet(b,1.0);CHKERRQ(ierr); ierr = SNESSetFunction(snes,NULL,MyComputeFunction,NULL);CHKERRQ(ierr); ierr = SNESSetJacobian(snes,NULL,NULL,MyComputeJacobian,NULL);CHKERRQ(ierr); /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Customize nonlinear solver; set runtime options - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = SNESSetFromOptions(snes);CHKERRQ(ierr); /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Solve nonlinear system - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = SNESSolve(snes,b,x);CHKERRQ(ierr); ierr = SNESGetIterationNumber(snes,&its);CHKERRQ(ierr); /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Free work space. All PETSc objects should be destroyed when they are no longer needed. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ ierr = VecDestroy(&x);CHKERRQ(ierr); ierr = VecDestroy(&b);CHKERRQ(ierr); ierr = SNESDestroy(&snes);CHKERRQ(ierr); ierr = DMDestroy(&da);CHKERRQ(ierr); ierr = PetscFinalize(); return ierr; } /* ------------------------------------------------------------------- */ PetscErrorCode MyComputeFunction(SNES snes,Vec x,Vec F,void *ctx) { PetscErrorCode ierr; Mat J; DM dm; PetscFunctionBeginUser; ierr = SNESGetDM(snes,&dm);CHKERRQ(ierr); ierr = DMGetApplicationContext(dm,&J);CHKERRQ(ierr); if (!J) { ierr = DMSetMatType(dm,MATAIJ);CHKERRQ(ierr); ierr = DMCreateMatrix(dm,&J);CHKERRQ(ierr); ierr = MatSetDM(J, NULL);CHKERRQ(ierr); ierr = FormMatrix(dm,J);CHKERRQ(ierr); ierr = DMSetApplicationContext(dm,J);CHKERRQ(ierr); ierr = DMSetApplicationContextDestroy(dm,(PetscErrorCode (*)(void**))MatDestroy);CHKERRQ(ierr); } ierr = MatMult(J,x,F);CHKERRQ(ierr); PetscFunctionReturn(0); } PetscErrorCode MyComputeJacobian(SNES snes,Vec x,Mat J,Mat Jp,void *ctx) { PetscErrorCode ierr; DM dm; PetscFunctionBeginUser; ierr = SNESGetDM(snes,&dm);CHKERRQ(ierr); ierr = FormMatrix(dm,Jp);CHKERRQ(ierr); PetscFunctionReturn(0); } PetscErrorCode FormMatrix(DM da,Mat jac) { PetscErrorCode ierr; PetscInt i,j,nrows = 0; MatStencil col[5],row,*rows; PetscScalar v[5],hx,hy,hxdhy,hydhx; DMDALocalInfo info; PetscFunctionBeginUser; ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); hx = 1.0/(PetscReal)(info.mx-1); hy = 1.0/(PetscReal)(info.my-1); hxdhy = hx/hy; hydhx = hy/hx; ierr = PetscMalloc1(info.ym*info.xm,&rows);CHKERRQ(ierr); /* Compute entries for the locally owned part of the Jacobian. - Currently, all PETSc parallel matrix formats are partitioned by contiguous chunks of rows across the processors. - Each processor needs to insert only elements that it owns locally (but any non-local elements will be sent to the appropriate processor during matrix assembly). - Here, we set all entries for a particular row at once. 
- We can set matrix entries using either MatSetValuesLocal() or MatSetValues(), as discussed above. */ for (j=info.ys; j