! xref: /petsc/src/ksp/pc/tests/ex8f.F90 (revision 9b88ac225e01f016352a5f4cd90e158abe5f5675)
!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscksp.h>
module ex8fmodule
  use petscksp
  implicit none

contains
  !> Residual callback registered with PCMGSetResidual().
  !> This test only verifies that the callback can be registered on a
  !> multigrid level; the residual r = b - A*x is intentionally not
  !> computed (the driver never calls KSPSolve), so the body only
  !> reports success.
  subroutine MyResidual(A, b, x, r, ierr)
    Mat A                ! operator on this multigrid level
    Vec b, x, r          ! right-hand side, current iterate, residual
    PetscErrorCode ierr  ! return status, 0 on success (was plain integer: wrong kind under 64-bit PETSc builds)

    ! Fix: ierr was previously left undefined on return, handing PETSc
    ! an arbitrary error status.
    ierr = 0
  end subroutine MyResidual

end module ex8fmodule
18
program main
  use petscksp
  use ex8fmodule
  implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Test driver for PCMGSetResidual: assembles a 2-D five-point
!  Laplacian-style operator, creates a one-level PCMG preconditioner,
!  and registers MyResidual as the residual routine for level 0.
!  No solve is performed; the test only exercises the registration.
!
!  Variables:
!     ksp              - linear solver context
!     x, b, u          - approx solution, right-hand side, exact solution
!     A                - matrix that defines the linear system
!     half             - scalar value used to fill the exact solution
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  Mat A
  Vec x, b, u
  PC pc
  PetscInt grid_n, mat_dim, rstart, rend
  PetscInt ri, ci, col, row, ione, level0
  PetscErrorCode ierr
  PetscScalar val
  PetscScalar half
  KSP ksp

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  half = .5
  grid_n = 6
  mat_dim = grid_n*grid_n
  ione = 1
  level0 = 0

!  Create the parallel matrix, giving only its global dimensions: the
!  storage format can be selected at runtime (MatSetFromOptions) and
!  the parallel partitioning is chosen by PETSc.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, mat_dim, mat_dim, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  PETSc parallel matrices are partitioned into contiguous row chunks;
!  ask which rows this rank owns and fill only those (off-rank entries
!  would be forwarded during assembly anyway).  Rows/columns are always
!  addressed by their global indices.

  PetscCallA(MatGetOwnershipRange(A, rstart, rend, ierr))

  do row = rstart, rend - 1
    val = -1.0
    ri = row/grid_n          ! grid row of this matrix row
    ci = row - ri*grid_n     ! grid column of this matrix row
    if (ri > 0) then         ! south neighbor
      col = row - grid_n
      PetscCallA(MatSetValues(A, ione, [row], ione, [col], [val], ADD_VALUES, ierr))
    end if
    if (ri < grid_n - 1) then ! north neighbor
      col = row + grid_n
      PetscCallA(MatSetValues(A, ione, [row], ione, [col], [val], ADD_VALUES, ierr))
    end if
    if (ci > 0) then         ! west neighbor
      col = row - 1
      PetscCallA(MatSetValues(A, ione, [row], ione, [col], [val], ADD_VALUES, ierr))
    end if
    if (ci < grid_n - 1) then ! east neighbor
      col = row + 1
      PetscCallA(MatSetValues(A, ione, [row], ione, [col], [val], ADD_VALUES, ierr))
    end if
    val = 4.0                ! diagonal entry
    PetscCallA(MatSetValues(A, ione, [row], ione, [row], [val], ADD_VALUES, ierr))
  end do

!  Two-step assembly; independent computation may be overlapped with
!  the message traffic between Begin and End.

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create the parallel vectors: one is built from scratch (letting
!  PETSc pick the local sizes) and the others duplicated from it.

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, mat_dim, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

!  Choose an exact solution, then derive the matching right-hand side.

  PetscCallA(VecSet(u, half, ierr))
  PetscCallA(MatMult(A, u, b, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!   Create the solver, switch its preconditioner to PCMG with a single
!   level, and register the residual callback under test.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))
  PetscCallA(KSPGetPC(ksp, pc, ierr))
  PetscCallA(PCSetType(pc, PCMG, ierr))
  PetscCallA(PCMGSetLevels(pc, ione, PETSC_NULL_MPI_COMM, ierr))
  PetscCallA(PCMGSetResidual(pc, level0, MyResidual, A, ierr))

!  The system matrix also serves as the preconditioning matrix.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

!  Clean up all PETSc objects and finalize.

  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

  PetscCallA(PetscFinalize(ierr))
end
159
!/*TEST
!
!   test:
!      nsize: 1
!      output_file: output/empty.out
!TEST*/
166