!
!  Description: Solves a complex linear system in parallel with KSP (Fortran code).
!

!
!  The model problem:
!     Solve Helmholtz equation on the unit square: (0,1) x (0,1)
!          -delta u - sigma1*u + i*sigma2*u = f,
!           where delta = Laplace operator
!     Dirichlet b.c.'s on all sides
!     Use the 2-D, five-point finite difference stencil.
!
!     Compiling the code:
!      This code uses the complex numbers version of PETSc, so configure
!      must be run to enable this
!
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscksp.h>
program main
  use petscksp
  implicit none

!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number context
!

  KSP ksp
  Mat A
  Vec x, b, u
  PetscRandom rctx
  PetscReal norm, h2, sigma1
  PetscScalar none, sigma2, v, pfive, czero
  PetscScalar cone
  PetscInt dim, its, n, Istart
  PetscInt Iend, i, j, II, JJ, one
  PetscErrorCode ierr
  PetscMPIInt rank
  PetscBool flg
  logical use_random

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  none = -1.0
  n = 6
  sigma1 = 100.0
  czero = 0.0
  cone = PETSC_i
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-sigma1', sigma1, flg, ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
  dim = n*n

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

!  With -norandom, use the fixed imaginary shift sigma2 = 10i;
!  otherwise draw a fresh random sigma2 in [0, i) for each diagonal entry.
  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-norandom', flg, ierr))
  if (flg) then
    use_random = .false.
    sigma2 = 10.0*PETSC_i
  else
    use_random = .true.
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
    PetscCallA(PetscRandomSetInterval(rctx, czero, cone, ierr))
  end if
!  Mesh spacing squared; evaluate the divisor in the kind of h2 so no
!  precision is lost through a default-real intermediate when PetscReal
!  is double precision.
  h2 = 1.0/real((n + 1)*(n + 1), kind(h2))

!  Five-point stencil: row II corresponds to grid point (i,j) with
!  i = II/n, j = mod(II,n); off-diagonal couplings are -1, and the
!  diagonal carries 4 plus the (complex) Helmholtz shift.
  one = 1
  do II = Istart, Iend - 1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (i < n - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (use_random) PetscCallA(PetscRandomGetValue(rctx, sigma2, ierr))
    v = 4.0 - sigma1*h2 + sigma2*h2
    PetscCallA(MatSetValues(A, one, [II], one, [II], [v], ADD_VALUES, ierr))
  end do
!  The random context is recreated later for the right-hand side, so it
!  can be destroyed here.
  if (use_random) PetscCallA(PetscRandomDestroy(rctx, ierr))

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, dim, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.

  if (use_random) then
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
    PetscCallA(VecSetRandom(u, rctx, ierr))
  else
    pfive = 0.5
    PetscCallA(VecSet(u, pfive, ierr))
  end if
  PetscCallA(MatMult(A, u, b, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix used to construct the preconditioner.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>

  PetscCallA(KSPSetFromOptions(ksp, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(KSPSolve(ksp, b, x, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error: x <- x - u, then report ||x - u||_2

  PetscCallA(VecAXPY(x, none, u, ierr))
  PetscCallA(VecNorm(x, NORM_2, norm, ierr))
  PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
  if (rank == 0) then
    if (norm > 1.e-12) then
      write (6, 100) norm, its
    else
      write (6, 110) its
    end if
  end if
100 format('Norm of error ', e11.4, ',iterations ', i5)
110 format('Norm of error < 1.e-12,iterations ', i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

  if (use_random) PetscCallA(PetscRandomDestroy(rctx, ierr))
  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

  PetscCallA(PetscFinalize(ierr))
end

!
!/*TEST
!
!   build:
!      requires: complex
!
!   test:
!      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex11f_1.out
!
!TEST*/
