xref: /petsc/src/binding/petsc4py/demo/hpddm/saddle_point.py (revision 6c5693054f5123506dab0f5da2d352ed973d0e50)
# This script demonstrates solving a symmetric saddle-point linear system using PETSc and the HPDDM preconditioner
# It must be run with exactly 4 MPI processes
# Example run:
#   mpiexec -n 4 python3 saddle_point.py -ksp_monitor -ksp_type fgmres -ksp_max_it 10 -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_has_neumann -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian
# For more options, see ${PETSC_DIR}/src/ksp/ksp/tutorials/ex87.c
6
import sys
import petsc4py
# Initialize PETSc with the command-line arguments; per the petsc4py docs this
# must happen before `from petsc4py import PETSc` so that runtime options
# (-ksp_type, -fieldsplit_*, ...) are registered with the options database
petsc4py.init(sys.argv)
from petsc4py import PETSc
12
# Function to load matrices and index sets from binary files
def mat_and_is_load(prefix, identifier, A, aux_IS, aux_Mat, rank, size):
  """Load one diagonal block and its process-local auxiliary data from disk.

  Parameters:
    prefix     -- path prefix of the binary data files
    identifier -- block identifier ("00" or "11")
    A          -- parallel Mat, filled in place with the global block
    aux_IS     -- sequential IS, filled in place with the subdomain numbering
    aux_Mat    -- sequential Mat, filled in place with the local Neumann matrix
    rank, size -- MPI rank and communicator size, used to select per-process files
  """
  # Load the size descriptor of the current process; given the setSizes() call
  # below, it holds [local rows, local cols, global rows, global cols]
  sizes = PETSc.IS().load(PETSc.Viewer().createBinary(f"{prefix}{identifier}_sizes_{rank}_{size}.dat", "r", comm = PETSc.COMM_SELF))
  idx   = sizes.getIndices()
  # Set the local and global sizes of the matrix before loading it
  A.setSizes([[idx[0], idx[2]], [idx[1], idx[3]]])
  # Configure matrix using runtime options
  A.setFromOptions()
  # Mat.load fills A in place and returns it; do not rebind the parameter
  # (consistent with the aux_IS/aux_Mat loads below, and the caller would not
  # see a rebound local anyway)
  A.load(PETSc.Viewer().createBinary(f"{prefix}{identifier}.dat", "r", comm = PETSc.COMM_WORLD))

  # Load the index set (local-to-global numbering) of the current process
  aux_IS.load(PETSc.Viewer().createBinary(f"{prefix}{identifier}_is_{rank}_{size}.dat", "r", comm = PETSc.COMM_SELF))
  # Load the Neumann matrix of the current process
  aux_Mat.load(PETSc.Viewer().createBinary(f"{prefix}{identifier}_aux_{rank}_{size}.dat", "r", comm = PETSc.COMM_SELF))
30
# Get the size of the communicator and the rank of the calling process
size = PETSc.COMM_WORLD.getSize()
rank = PETSc.COMM_WORLD.getRank()
# The data files on disk are partitioned for exactly 4 subdomains
if size != 4:
  if rank == 0:
    print("This example requires 4 processes")
  # Use sys.exit() rather than quit(): quit() is injected by the interactive
  # `site` module and is not guaranteed to exist in scripts (e.g. python -S);
  # sys.exit() raises the same SystemExit with status 0
  sys.exit()
39
# Problem type (either 'elasticity' or 'stokes')
system_str = PETSc.Options().getString("system", "elasticity")
# Numeric problem id: 0 for elasticity, anything else (stokes) maps to 1
id_sys = 0 if system_str == "elasticity" else 1
empty_A11 = False
# The (1,1) diagonal block may only be empty for the Stokes problem;
# for elasticity the -empty_A11 option is ignored and the block is kept
if id_sys == 1:
  empty_A11 = PETSc.Options().getBool("empty_A11", False)
47
# 2-by-2 block structure stored row-major: [A00, A01, A10, A11]
A = [None] * 4
# Auxiliary data (subdomain numbering and Neumann matrix) exists only for the
# two diagonal blocks
aux_Mat = [None] * 2
aux_IS = [None] * 2

# Create placeholder objects for the diagonal blocks (0,0) and (1,1):
# a parallel matrix for the block itself, plus sequential auxiliary objects
for field, diag in enumerate((0, 3)):
  A[diag] = PETSc.Mat().create(comm = PETSc.COMM_WORLD)
  A[diag].setFromOptions()
  aux_IS[field] = PETSc.IS().create(comm = PETSc.COMM_SELF)
  aux_Mat[field] = PETSc.Mat().create(comm = PETSc.COMM_SELF)
63
# Directory holding the input data files
load_dir = PETSc.Options().getString("load_dir", "${DATAFILESPATH}/matrices/hpddm/GENEO")
# Problem-specific file prefix: 'A' files for elasticity, 'B' files for Stokes
prefix = load_dir + "/" + ("B" if id_sys == 1 else "A")

# Load the diagonal blocks together with the auxiliary data needed by PCHPDDM
mat_and_is_load(prefix, "00", A[0], aux_IS[0], aux_Mat[0], rank, size)
mat_and_is_load(prefix, "11", A[3], aux_IS[1], aux_Mat[1], rank, size)
72
# Coherent off-diagonal (1,0) block (loaded from the "10" data file and placed
# at nest position [1][0] below)
A[2] = PETSc.Mat().create(comm = PETSc.COMM_WORLD)
n, _ = A[0].getLocalSize()
N, _ = A[0].getSize()
m, _ = A[3].getLocalSize()
M, _ = A[3].getSize()
# Row layout follows the (1,1) block, column layout follows the (0,0) block
A[2].setSizes([[m, M], [n, N]])
A[2].setFromOptions()
# Reuse the problem-specific prefix computed above instead of rebuilding it
A[2].load(PETSc.Viewer().createBinary(f"{prefix}10.dat", "r", comm = PETSc.COMM_WORLD))
# Create a matrix that behaves like A[2] transposed without explicitly assembling it
A[1] = PETSc.Mat().createTranspose(A[2])

# Global MatNest; with -empty_A11 the (1,1) block is omitted entirely
S = PETSc.Mat().createNest([[A[0], A[1]], [A[2], A[3] if not empty_A11 else None]])
88
ksp = PETSc.KSP().create(comm = PETSc.COMM_WORLD)
# The MatNest S is used as both the operator and the preconditioning matrix
ksp.setOperators(S)
pc = ksp.getPC()

# Use FIELDSPLIT as the outer preconditioner
pc.setType(PETSc.PC.Type.FIELDSPLIT)
# Use a SCHUR factorization since there are only two fields
pc.setFieldSplitType(PETSc.PC.CompositeType.SCHUR)
# Use SELF because HPDDM deals with a Schur complement matrix
pc.setFieldSplitSchurPreType(PETSc.PC.FieldSplitSchurPreType.SELF)
# Apply any command-line options (which may override options from the source file)
pc.setFromOptions()
# Set up the outer preconditioner so that the inner preconditioners can be queried
pc.setUp()
103
# Retrieve the inner solvers associated with the two fields of the split
ksp0, ksp1 = pc.getFieldSplitSubKSP()

# First field: upper-left (0,0) block
pc0 = ksp0.getPC()
# Use HPDDM as the preconditioner
pc0.setType(PETSc.PC.Type.HPDDM)
# Set the index set (local-to-global numbering) and auxiliary matrix (first diagonal block)
pc0.setHPDDMAuxiliaryMat(aux_IS[0], aux_Mat[0])
pc0.setFromOptions()

# Second field: Schur complement
pc1 = ksp1.getPC()
# Use HPDDM as the preconditioner
pc1.setType(PETSc.PC.Type.HPDDM)
if not empty_A11:
# Set the index set (local-to-global numbering) and auxiliary matrix (second diagonal block)
# If there is no (1,1) block (-empty_A11), then these are computed automatically by HPDDM
  pc1.setHPDDMAuxiliaryMat(aux_IS[1], aux_Mat[1])
pc1.setFromOptions()
124
# Create right-hand side (b) and solution (x) vectors conforming to S
b, x = S.createVecs()
# The right-hand side is read from the problem-specific binary file
rhs_viewer = PETSc.Viewer().createBinary(load_dir + "/rhs_" + ("B" if id_sys == 1 else "A") + ".dat", "r", comm = PETSc.COMM_WORLD)
b.load(rhs_viewer)
# Let command-line options override the solver configuration, then solve
ksp.setFromOptions()
ksp.solve(b, x)
130