xref: /petsc/src/binding/petsc4py/demo/poisson2d/poisson2d.py (revision bcee047adeeb73090d7e36cc71e39fc287cdbb97)
1# Poisson in 2D
2# =============
3#
4# Solve a constant coefficient Poisson problem on a regular grid. The source
5# code for this demo can be `downloaded here <../../_static/poisson2d.py>`__
6#
7# .. math::
8#
9#     - u_{xx} - u_{yy} = 1 \quad\textsf{in}\quad [0,1]^2\\
10#     u = 0 \quad\textsf{on the boundary.}
11
12# This is a naïve, parallel implementation, using :math:`n` interior grid
13# points per dimension and a lexicographic ordering of the nodes.
14
15# This code is kept as simple as possible. However, simplicity comes at a
16# price. Here we use a naive decomposition that does not lead to an optimal
17# communication complexity for the matrix-vector product. An optimal complexity
18# decomposition of a structured grid could be achieved using `PETSc.DMDA`.
19#
20# This demo is structured as a script to be executed using:
21#
22# .. code-block:: console
23#
24#   $ python poisson2d.py
25#
26# potentially with additional options passed at the end of the command.
27#
28# At the start of your script, call `petsc4py.init` passing `sys.argv` so that
29# command-line arguments to the script are passed through to PETSc.
30
31import sys
32import petsc4py
33
34petsc4py.init(sys.argv)
35
36# The full PETSc4py API is to be found in the `petsc4py.PETSc` module.
37
38from petsc4py import PETSc
39
40# PETSc is extensively programmable using the `PETSc.Options` database. For
41# more information see `working with PETSc Options <petsc_options>`.
42
OptDB = PETSc.Options()

# Grid resolution: ``n`` interior points per dimension (default ``5``,
# overridable with the ``-n`` command-line option) and the corresponding
# uniform mesh spacing ``h`` on the unit square.

n = OptDB.getInt('n', 5)
h = 1.0 / (n + 1)
51
# Sparse matrices are represented by `PETSc.Mat` objects. Passing the
# communicator explicitly — rather than relying on the implicit default of
# `PETSc.COMM_WORLD` — keeps the intended parallel layout unambiguous.

A = PETSc.Mat()
A.create(comm=PETSc.COMM_WORLD)

# Global shape: one row and one column per interior grid node. Local sizes
# are left to PETSc (`PETSc.DECIDE`), i.e. this call is shorthand for
#
# .. code-block:: python
#
#     A.setSizes(((PETSc.DECIDE, n * n), (PETSc.DECIDE, n * n)))

unknowns = n * n
A.setSizes((unknowns, unknowns))

# Choose one of the `sparse matrix formats <petsc4py.PETSc.Mat.Type>` and
# then let the user override any matrix option from the command line.

A.setType(PETSc.Mat.Type.AIJ)
A.setFromOptions()

# Insertion is vastly cheaper when space is preallocated instead of grown
# dynamically; hint the expected nonzeros per row (5 for the five-point
# stencil).

A.setPreallocationNNZ(5)
87
88# We can now write out our finite difference matrix assembly using conventional
89# Python syntax. `Mat.getOwnershipRange` is used to retrieve the range of rows
90# local to this processor.
91
92
def index_to_grid(r, grid_n=None):
    """Convert a global row number into an interior-grid point ``(i, j)``.

    Parameters
    ----------
    r : int
        Global (lexicographic) row index of the unknown.
    grid_n : int, optional
        Number of interior grid points per dimension. Defaults to the
        module-level ``n``, so existing callers are unaffected; passing it
        explicitly makes the function reusable for any grid size.

    Returns
    -------
    tuple of int
        ``(r // grid_n, r % grid_n)``.
    """
    if grid_n is None:
        grid_n = n
    # divmod computes quotient and remainder in one step.
    return divmod(r, grid_n)
96
97
rstart, rend = A.getOwnershipRange()
# The same two stencil coefficients fill every row, so hoist them out of
# the loop.
diag = 4.0 / h**2
offdiag = -1.0 / h**2
for row in range(rstart, rend):
    i, j = index_to_grid(row)
    A[row, row] = diag
    # Couple to the south/north/west/east neighbours; a missing guard means
    # the neighbour lies on the homogeneous Dirichlet boundary and is
    # simply omitted.
    if i > 0:
        A[row, row - n] = offdiag
    if i < n - 1:
        A[row, row + n] = offdiag
    if j > 0:
        A[row, row - 1] = offdiag
    if j < n - 1:
        A[row, row + 1] = offdiag
114
# Entries set above may target off-process rows, so the matrix must be
# assembled before use. `Mat.assemble` performs `Mat.assemblyBegin`
# followed by `Mat.assemblyEnd`, carrying out any required parallel
# communication.

A.assemble()

# Let the user print the assembled matrix by passing ``-view_mat``.

A.viewFromOptions('-view_mat')
126
# Linear solvers in PETSc are preconditioned Krylov subspace methods,
# represented by `PETSc.KSP` objects. Configure conjugate gradients
# preconditioned with PETSc's algebraic multigrid (GAMG), reusing the
# matrix's communicator.

ksp = PETSc.KSP()
ksp.create(comm=A.getComm())
ksp.setType(PETSc.KSP.Type.CG)
pc = ksp.getPC()
pc.setType(PETSc.PC.Type.GAMG)

# Attach the system matrix and honour any solver options supplied on the
# command line.

ksp.setOperators(A)
ksp.setFromOptions()
141
# `Mat.createVecs` returns `PETSc.Vec` objects whose sizes and parallel
# layout match the matrix, so no manual decomposition is needed. Set the
# right-hand side to the constant vector of ones and solve.

x, b = A.createVecs()
b.set(1.0)
ksp.solve(b, x)

# Let the user print the solution by passing ``-view_sol``.

x.viewFromOptions('-view_sol')
155
156# Things to try
157# -------------
158#
159# - Show the solution with ``-view_sol``.
160# - Show the matrix with ``-view_mat``.
161# - Change the resolution with ``-n``.
162# - Use a direct solver by passing ``-ksp_type preonly -pc_type lu``.
163# - Run in parallel on two processors using:
164#
165#   .. code-block:: console
166#
167#       mpiexec -n 2 python poisson2d.py
168