# xref: /petsc/src/binding/petsc4py/demo/legacy/petsc-examples/ksp/ex2.py (revision 226f8a8a5081bc6ad7227cd631662400f0d6e2a0)
'''
Ex2 from PETSc example files implemented for PETSc4py.
https://petsc.org/release/src/ksp/ksp/tutorials/ex2.c.html
By: Miguel Arriaga


Solves a linear system in parallel with KSP.
Input parameters include:
    -view_exact_sol   : write exact solution vector to stdout
    -m <mesh_x>       : number of mesh points in x-direction
    -n <mesh_n>       : number of mesh points in y-direction


Vec            x,b,u;    # approx solution, RHS, exact solution
Mat            A;        # linear system matrix
KSP            ksp;      # linear solver context
PetscReal      norm;     # norm of solution error
'''
import sys

# petsc4py must be initialized with the command-line arguments BEFORE
# importing PETSc, so runtime options (-m, -n, -ksp_type, ...) are seen.
import petsc4py
petsc4py.init(sys.argv)
from petsc4py import PETSc

import numpy as np

comm = PETSc.COMM_WORLD
size = comm.getSize()
rank = comm.getRank()

# Mesh dimensions, overridable from the command line (-m, -n).
OptDB = PETSc.Options()
m = OptDB.getInt('m', 8)
n = OptDB.getInt('n', 7)
''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Compute the matrix and right-hand-side vector that define
        the linear system, Ax = b.
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - '''
'''
    Create parallel matrix, specifying only its global dimensions.
    When using MatCreate(), the matrix format can be specified at
    runtime. Also, the parallel partitioning of the matrix is
    determined by PETSc at runtime.

    Performance tuning note:  For problems of substantial size,
    preallocation of matrix memory is crucial for attaining good
    performance. See the matrix chapter of the users manual for details.
'''

A = PETSc.Mat().create(comm=comm)
A.setSizes((m*n, m*n))
A.setFromOptions()
# Five-point stencil: at most 5 nonzeros per row (diagonal + off-diagonal).
A.setPreallocationNNZ((5, 5))

'''
    Currently, all PETSc parallel matrix formats are partitioned by
    contiguous chunks of rows across the processors.  Determine which
    rows of the matrix are locally owned.
'''
Istart, Iend = A.getOwnershipRange()
'''
    Set matrix elements for the 2-D, five-point stencil in parallel.
    - Each processor needs to insert only elements that it owns
    locally (but any non-local elements will be sent to the
    appropriate processor during matrix assembly).
    - Always specify global rows and columns of matrix entries.

    Note: this uses the less common natural ordering that orders first
    all the unknowns for x = h then for x = 2h etc; Hence you see J = Ii +- n
    instead of J = I +- m as you might expect. The more standard ordering
    would first do all variables for y = h, then y = 2h etc.
'''

for Ii in range(Istart, Iend):
    # Recover 2-D grid coordinates (i, j) of global row Ii in natural
    # ordering: Ii = i*n + j.
    i, j = divmod(Ii, n)

    # Off-diagonal stencil entries (-1), skipping neighbors outside the grid.
    v = -1.0
    if i > 0:
        A.setValues(Ii, Ii - n, v, addv=True)
    if i < m - 1:
        A.setValues(Ii, Ii + n, v, addv=True)
    if j > 0:
        A.setValues(Ii, Ii - 1, v, addv=True)
    if j < n - 1:
        A.setValues(Ii, Ii + 1, v, addv=True)

    # Diagonal entry.
    A.setValues(Ii, Ii, 4.0, addv=True)
'''
    Assemble matrix, using the 2-step process:
    MatAssemblyBegin(), MatAssemblyEnd()
    Computations can be done while messages are in transition
    by placing code between these two statements.
'''

A.assemblyBegin(A.AssemblyType.FINAL)
A.assemblyEnd(A.AssemblyType.FINAL)

# A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner.
A.setOption(A.Option.SYMMETRIC, True)
'''
    Create parallel vectors.
    - We form 1 vector from scratch and then duplicate as needed.
    - When using VecCreate(), VecSetSizes and VecSetFromOptions()
    in this example, we specify only the
    vector's global dimension; the parallel partitioning is determined
    at runtime.
    - When solving a linear system, the vectors and matrices MUST
    be partitioned accordingly.  PETSc automatically generates
    appropriately partitioned matrices and vectors when MatCreate()
    and VecCreate() are used with the same communicator.
    - The user can alternatively specify the local vector and matrix
    dimensions when more sophisticated partitioning is needed
    (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
    below).
'''

u = PETSc.Vec().create(comm=comm)
u.setSizes(m*n)
u.setFromOptions()

# b: right-hand side, x: computed solution — same layout as u.
b = u.duplicate()
x = b.duplicate()
'''
    Set exact solution; then compute right-hand-side vector.
    By default we use an exact solution of a vector with all
    elements of 1.0;
'''
u.set(1.0)
# b = A*u, computed in place into the preallocated vector (the original
# `b = A(u)` allocated a fresh vector and discarded the duplicate above).
A.mult(u, b)

'''
    View the exact solution vector if desired
'''
flg = OptDB.getBool('view_exact_sol', False)
if flg:
    u.view()
148''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149            Create the linear solver and set various options
150    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - '''
151ksp = PETSc.KSP().create(comm=comm)
152
153'''
154    Set operators. Here the matrix that defines the linear system
155    also serves as the matrix from which the preconditioner is constructed.
156'''
157ksp.setOperators(A,A)
158
159'''
160    Set linear solver defaults for this problem (optional).
161    - By extracting the KSP and PC contexts from the KSP context,
162    we can then directly call any KSP and PC routines to set
163    various options.
164    - The following two statements are optional; all of these
165    parameters could alternatively be specified at runtime via
166    KSPSetFromOptions().  All of these defaults can be
167    overridden at runtime, as indicated below.
168'''
169rtol=1.e-2/((m+1)*(n+1))
170ksp.setTolerances(rtol=rtol,atol=1.e-50)
171
172'''
173Set runtime options, e.g.,
174    -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
175These options will override those specified above as long as
176KSPSetFromOptions() is called _after_ any other customization
177routines.
178'''
179ksp.setFromOptions()
180
181''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
182                    Solve the linear system
183    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - '''
184
185ksp.solve(b,x)
186
187''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
188                    Check the solution and clean up
189    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - '''
190x = x - u # x.axpy(-1.0,u)
191norm = x.norm(PETSc.NormType.NORM_2)
192its = ksp.getIterationNumber()
193
194'''
195    Print convergence information.  PetscPrintf() produces a single
196    print statement from all processes that share a communicator.
197    An alternative is PetscFPrintf(), which prints to a file.
198'''
199if norm > rtol*10:
200    PETSc.Sys.Print("Norm of error {}, Iterations {}".format(norm,its),comm=comm)
201else:
202    if size==1:
203        PETSc.Sys.Print("- Serial OK",comm=comm)
204    else:
205        PETSc.Sys.Print("- Parallel OK",comm=comm)
206