! xref: /petsc/src/ksp/ksp/tutorials/ex11f.F90 (revision 82bdbe8dbc3bf8405b2dbf2d49bab168b8512162)
!
!  Description: Solves a complex linear system in parallel with KSP (Fortran code).
!

!
!  The model problem:
!     Solve the Helmholtz equation on the unit square: (0,1) x (0,1)
!          -delta u - sigma1*u + i*sigma2*u = f,
!           where delta = Laplace operator
!     Dirichlet b.c.'s on all sides
!     Use the 2-D, five-point finite difference stencil.
!
!     Compiling the code:
!      This code uses the complex numbers version of PETSc, so configure
!      must be run to enable this
!
!
! -----------------------------------------------------------------------
19
20      program main
21#include <petsc/finclude/petscksp.h>
22      use petscksp
23      implicit none
24
25!
26! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
27!                   Variable declarations
28! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
29!
30!  Variables:
31!     ksp     - linear solver context
32!     x, b, u  - approx solution, right-hand-side, exact solution vectors
33!     A        - matrix that defines linear system
34!     its      - iterations for convergence
35!     norm     - norm of error in solution
36!     rctx     - random number context
37!
38
39      KSP             ksp
40      Mat              A
41      Vec              x,b,u
42      PetscRandom      rctx
43      PetscReal norm,h2,sigma1
44      PetscScalar  none,sigma2,v,pfive,czero
45      PetscScalar  cone
46      PetscInt dim,its,n,Istart
47      PetscInt Iend,i,j,II,JJ,one
48      PetscErrorCode ierr
49      PetscMPIInt rank
50      PetscBool  flg
51      logical          use_random
52
53! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
54!                 Beginning of program
55! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
56
57      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
58      if (ierr .ne. 0) then
59        print*,'Unable to initialize PETSc'
60        stop
61      endif
62
63      none   = -1.0
64      n      = 6
65      sigma1 = 100.0
66      czero  = 0.0
67      cone   = PETSC_i
68      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
69      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-sigma1',sigma1,flg,ierr)
70      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
71      dim    = n*n
72
73! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
74!      Compute the matrix and right-hand-side vector that define
75!      the linear system, Ax = b.
76! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
77
78!  Create parallel matrix, specifying only its global dimensions.
79!  When using MatCreate(), the matrix format can be specified at
80!  runtime. Also, the parallel partitioning of the matrix is
81!  determined by PETSc at runtime.
82
83      call MatCreate(PETSC_COMM_WORLD,A,ierr)
84      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
85      call MatSetFromOptions(A,ierr)
86      call MatSetUp(A,ierr)
87
88!  Currently, all PETSc parallel matrix formats are partitioned by
89!  contiguous chunks of rows across the processors.  Determine which
90!  rows of the matrix are locally owned.
91
92      call MatGetOwnershipRange(A,Istart,Iend,ierr)
93
94!  Set matrix elements in parallel.
95!   - Each processor needs to insert only elements that it owns
96!     locally (but any non-local elements will be sent to the
97!     appropriate processor during matrix assembly).
98!   - Always specify global rows and columns of matrix entries.
99
100      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-norandom',flg,ierr)
101      if (flg) then
102         use_random = .false.
103         sigma2 = 10.0*PETSC_i
104      else
105         use_random = .true.
106         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
107         call PetscRandomSetFromOptions(rctx,ierr)
108         call PetscRandomSetInterval(rctx,czero,cone,ierr)
109      endif
110      h2 = 1.0/real((n+1)*(n+1))
111
112      one = 1
113      do 10, II=Istart,Iend-1
114        v = -1.0
115        i = II/n
116        j = II - i*n
117        if (i.gt.0) then
118          JJ = II - n
119          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
120        endif
121        if (i.lt.n-1) then
122          JJ = II + n
123          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
124        endif
125        if (j.gt.0) then
126          JJ = II - 1
127          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
128        endif
129        if (j.lt.n-1) then
130          JJ = II + 1
131          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
132        endif
133        if (use_random) call PetscRandomGetValue(rctx,sigma2,ierr)
134        v = 4.0 - sigma1*h2 + sigma2*h2
135        call  MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
136 10   continue
137      if (use_random) call PetscRandomDestroy(rctx,ierr)
138
139!  Assemble matrix, using the 2-step process:
140!       MatAssemblyBegin(), MatAssemblyEnd()
141!  Computations can be done while messages are in transition
142!  by placing code between these two statements.
143
144      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
145      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
146
147!  Create parallel vectors.
148!   - Here, the parallel partitioning of the vector is determined by
149!     PETSc at runtime.  We could also specify the local dimensions
150!     if desired.
151!   - Note: We form 1 vector from scratch and then duplicate as needed.
152
153      call VecCreate(PETSC_COMM_WORLD,u,ierr)
154      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
155      call VecSetFromOptions(u,ierr)
156      call VecDuplicate(u,b,ierr)
157      call VecDuplicate(b,x,ierr)
158
159!  Set exact solution; then compute right-hand-side vector.
160
161      if (use_random) then
162         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
163         call PetscRandomSetFromOptions(rctx,ierr)
164         call VecSetRandom(u,rctx,ierr)
165      else
166         pfive = 0.5
167         call VecSet(u,pfive,ierr)
168      endif
169      call MatMult(A,u,b,ierr)
170
171! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
172!         Create the linear solver and set various options
173! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
174
175!  Create linear solver context
176
177      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
178
179!  Set operators. Here the matrix that defines the linear system
180!  also serves as the preconditioning matrix.
181
182      call KSPSetOperators(ksp,A,A,ierr)
183
184!  Set runtime options, e.g.,
185!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
186
187      call KSPSetFromOptions(ksp,ierr)
188
189! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190!                      Solve the linear system
191! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
192
193      call KSPSolve(ksp,b,x,ierr)
194
195! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
196!                     Check solution and clean up
197! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198
199!  Check the error
200
201      call VecAXPY(x,none,u,ierr)
202      call VecNorm(x,NORM_2,norm,ierr)
203      call KSPGetIterationNumber(ksp,its,ierr)
204      if (rank .eq. 0) then
205        if (norm .gt. 1.e-12) then
206           write(6,100) norm,its
207        else
208           write(6,110) its
209        endif
210      endif
211  100 format('Norm of error ',e11.4,',iterations ',i5)
212  110 format('Norm of error < 1.e-12,iterations ',i5)
213
214!  Free work space.  All PETSc objects should be destroyed when they
215!  are no longer needed.
216
217      if (use_random) call PetscRandomDestroy(rctx,ierr)
218      call KSPDestroy(ksp,ierr)
219      call VecDestroy(u,ierr)
220      call VecDestroy(x,ierr)
221      call VecDestroy(b,ierr)
222      call MatDestroy(A,ierr)
223
224      call PetscFinalize(ierr)
225      end

!
!/*TEST
!
!   build:
!      requires: complex
!
!   test:
!      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex11f_1.out
!
!TEST*/

238