#include <petscksp.h>
#include <petsc/private/petscimpl.h>

static char help[] = "Solves a saddle-point linear system using PCHPDDM.\n\n";

static PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt size);

int main(int argc, char **args)
{
  Vec b, x; /* right-hand side and computed solution */
  Mat A[4], aux[2], S; /* individual blocks, local auxiliary matrices, and global linear system matrix */
  KSP ksp, *subksp; /* linear solver context */
  PC pc;
  IS is[2];
  PetscMPIInt size;
  PetscInt m, M, n, N, id = 0;
  PetscViewer viewer;
  const char *const system[] = {"elasticity", "stokes", "diffusion", "lagrange"};
  /* "elasticity":
   *   2D linear elasticity with rubber-like and steel-like material coefficients, i.e., Poisson's ratio \in {0.4999, 0.35} and Young's modulus \in {0.01 GPa, 200.0 GPa}
   *   discretized by order 2 (resp. 0) Lagrange finite elements in displacements (resp. pressure) on a triangle mesh
   * "stokes":
   *   2D lid-driven cavity with constant viscosity
   *   discretized by order 2 (resp. 1) Lagrange finite elements, i.e., lowest-order Taylor--Hood finite elements, in velocities (resp. pressure) on a triangle mesh
   *   if the option -empty_A11 is not set (or is set to false), a pressure with zero mean value is computed
   * "diffusion":
   *   2D primal-dual nonsymmetric diffusion equation
   *   discretized by order 2 (resp. 1) Lagrange finite elements in primal (resp. dual) unknowns on a triangle mesh
   * "lagrange":
   *   2D linear elasticity with essential boundary conditions imposed through a Lagrange multiplier
   */
  char dir[PETSC_MAX_PATH_LEN], prefix[PETSC_MAX_PATH_LEN];
  PCType type;
  PetscBool flg[4] = {PETSC_FALSE, PETSC_FALSE, PETSC_FALSE, PETSC_FALSE};

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscOptionsGetEList(NULL, NULL, "-system", system, PETSC_STATIC_ARRAY_LENGTH(system), &id, NULL));
  if (id == 1) PetscCall(PetscOptionsGetBool(NULL, NULL, "-empty_A11", flg, NULL));
  if (id != 3) PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 4 processes");
  else PetscCheck(id == 3 && size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires 2 processes");
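  /* A[] holds the four blocks of the 2x2 saddle-point system in row-major order,
     while is[] and aux[] hold, for each diagonal block, the subdomain index set and
     the local auxiliary matrix later passed to PCHPDDMSetAuxiliaryMat() */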
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatCreate(PETSC_COMM_WORLD, A + (i ? 3 : 0)));
    if (id < 2 || (id == 3 && i == 0)) {
      PetscCall(ISCreate(PETSC_COMM_SELF, is + i));
      PetscCall(MatCreate(PETSC_COMM_SELF, aux + i));
    } else {
      is[i]  = NULL;
      aux[i] = NULL;
    }
  }
  PetscCall(PetscStrncpy(dir, ".", sizeof(dir)));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-load_dir", dir, sizeof(dir), NULL));
  /* loading matrices and auxiliary data for the diagonal blocks */
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s", dir, id == 3 ? "D" : (id == 2 ? "E" : (id == 1 ? "B" : "A"))));
  PetscCall(MatAndISLoad(prefix, "00", A[0], is[0], aux[0], size));
  PetscCall(MatAndISLoad(prefix, "11", A[3], is[1], aux[1], size));
  /* loading the off-diagonal block with a coherent row/column layout */
  PetscCall(MatCreate(PETSC_COMM_WORLD, A + 2));
  PetscCall(MatGetLocalSize(A[0], &n, NULL));
  PetscCall(MatGetSize(A[0], &N, NULL));
  PetscCall(MatGetLocalSize(A[3], &m, NULL));
  PetscCall(MatGetSize(A[3], &M, NULL));
  PetscCall(MatSetSizes(A[2], m, n, M, N));
  PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/%s10.dat", dir, id == 3 ? "D" : (id == 2 ? "E" : (id == 1 ? "B" : "A"))));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A[2], viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  if (id != 2) {
    /* transposing the off-diagonal block */
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-transpose", flg + 1, NULL));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-permute", flg + 2, NULL));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit", flg + 3, NULL));
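    /* flg[1] (-transpose): relate the two off-diagonal blocks by transposition instead of Hermitian transposition
       flg[2] (-permute): store the explicitly loaded block in the (0,1) slot and derive the (1,0) block from it
       flg[3] (-explicit): assemble the derived block explicitly instead of wrapping it with MatCreate(Hermitian)Transpose() */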
    if (flg[1]) {
      if (flg[2]) {
        PetscCall(MatTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
        PetscCall(MatDestroy(A + 2));
      }
      if (!flg[3]) PetscCall(MatCreateTranspose(A[2 - flg[2]], A + 1 + flg[2]));
      else PetscCall(MatTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
    } else {
      if (flg[2]) {
        PetscCall(MatHermitianTranspose(A[2], MAT_INITIAL_MATRIX, A + 1));
        PetscCall(MatDestroy(A + 2));
      }
      if (!flg[3]) PetscCall(MatCreateHermitianTranspose(A[2 - flg[2]], A + 1 + flg[2]));
      else PetscCall(MatHermitianTranspose(A[2 - flg[2]], MAT_INITIAL_MATRIX, A + 1 + flg[2]));
    }
  } else {
    PetscCall(MatCreate(PETSC_COMM_WORLD, A + 1));
    PetscCall(MatSetSizes(A[1], n, m, N, M));
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/E01.dat", dir));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
    PetscCall(MatLoad(A[1], viewer));
    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(MatConvert(A[0], MATBAIJ, MAT_INPLACE_MATRIX, A));
  }
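  /* -empty_A11 (Stokes only) drops the (1,1) block so the nested matrix has an empty block there;
     otherwise, -diagonal_A11 replaces it with a small constant diagonal matrix */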
  if (flg[0]) PetscCall(MatDestroy(A + 3));
  else {
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-diagonal_A11", flg, NULL));
    if (flg[0]) {
      PetscCall(MatDestroy(A + 3));
      PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, m, m, M, M, PETSC_SMALL, A + 3));
    }
  }
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-all_transpose", flg + 1, NULL));
  if (flg[1] && flg[2]) {
    PetscCall(MatTranspose(A[1], MAT_INITIAL_MATRIX, &S));
    PetscCall(MatDestroy(A + 1));
    PetscCall(MatCreateHermitianTranspose(S, A + 1));
    PetscCall(MatDestroy(&S));
  }
  /* global coefficient matrix */
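  /* S is a MATNEST built from the four blocks, taken in row-major order:
         S = [ A[0] A[1] ]
             [ A[2] A[3] ] */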
  PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, A, &S));
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, S, S));
  PetscCall(KSPGetPC(ksp, &pc));
  /* outer preconditioner */
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetType(pc, PC_COMPOSITE_SCHUR));
  PetscCall(PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_SELF, NULL));
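  /* with PC_FIELDSPLIT_SCHUR_PRE_SELF, the preconditioner for the Schur complement is built
     from the (unassembled) Schur complement itself, which the inner PCHPDDM set below can handle */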
  PetscCall(PCSetFromOptions(pc));
  PetscCall(PCGetType(pc, &type));
  PetscCall(PetscStrcmp(type, PCFIELDSPLIT, flg + 1));
  if (flg[1]) {
    PetscCall(PCSetUp(pc));
    PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
    PetscCall(KSPGetPC(subksp[0], &pc));
    /* inner preconditioner associated with the top-left block */
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
    PetscCall(PCSetType(pc, PCHPDDM));
    PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
#endif
    PetscCall(PCSetFromOptions(pc));
    PetscCall(KSPGetPC(subksp[1], &pc));
    /* inner preconditioner associated with the Schur complement, which will be set internally to PCKSP (or PCASM if the Schur complement is centralized on a single process) */
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
    PetscCall(PCSetType(pc, PCHPDDM));
    if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
#endif
    PetscCall(PCSetFromOptions(pc));
    PetscCall(PetscFree(subksp));
  } else PetscCall(MatSetBlockSize(A[0], 2));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(MatCreateVecs(S, &b, &x));
  if (id != 2) {
    PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%s/rhs_%s.dat", dir, id == 3 ? "D" : (id == 1 ? "B" : "A")));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, prefix, FILE_MODE_READ, &viewer));
    PetscCall(VecLoad(b, viewer));
    PetscCall(PetscViewerDestroy(&viewer));
  } else PetscCall(VecSetRandom(b, NULL));
  PetscCall(KSPSolve(ksp, b, x));
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-viewer", flg + 1, NULL));
  if (flg[1]) PetscCall(PCView(pc, PETSC_VIEWER_STDOUT_WORLD));
  flg[1] = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-successive_solves", flg + 1, NULL));
  if (flg[1]) {
    KSPConvergedReason reason[2];
    PetscInt iterations[2];
    PetscCall(KSPGetConvergedReason(ksp, reason));
    PetscCall(KSPGetTotalIterations(ksp, iterations));
    PetscCall(KSPMonitorCancel(ksp));
    PetscCall(PetscOptionsClearValue(NULL, "-ksp_monitor"));
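    /* artificially bump the state of the operator so that the second KSPSolve() goes through a full
       PCSetUp() again, as if the matrix had been modified between the two solves */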
    PetscCall(PetscObjectStateIncrease((PetscObject)S));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetUp(pc)); /* update PCFIELDSPLIT submatrices */
    PetscCall(PCFieldSplitGetSubKSP(pc, &n, &subksp));
    PetscCall(KSPGetPC(subksp[0], &pc));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
    PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[0], aux[0], NULL, NULL));
#endif
    PetscCall(PCSetFromOptions(pc));
    PetscCall(KSPGetPC(subksp[1], &pc));
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
    PetscCall(PCSetType(pc, PCHPDDM)); /* may have been set to PCKSP internally (or PCASM if the Schur complement is centralized on a single process), so need to enforce the proper PCType */
    if (!flg[0]) PetscCall(PCHPDDMSetAuxiliaryMat(pc, is[1], aux[1], NULL, NULL));
#endif
    PetscCall(PCSetFromOptions(pc));
    PetscCall(PetscFree(subksp));
    PetscCall(KSPSolve(ksp, b, x));
    PetscCall(KSPGetConvergedReason(ksp, reason + 1));
    PetscCall(KSPGetTotalIterations(ksp, iterations + 1));
    iterations[1] -= iterations[0];
    PetscCheck(reason[0] == reason[1] && PetscAbs(iterations[0] - iterations[1]) <= 3, PetscObjectComm((PetscObject)ksp), PETSC_ERR_PLIB, "Successive calls to KSPSolve() did not converge for the same reason (%s v. %s) or with the same number of iterations (+/- 3, %" PetscInt_FMT " v. %" PetscInt_FMT ")", KSPConvergedReasons[reason[0]], KSPConvergedReasons[reason[1]], iterations[0], iterations[1]);
  }
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&S));
  PetscCall(MatDestroy(A + 1));
  PetscCall(MatDestroy(A + 2));
  for (PetscInt i = 0; i < 2; ++i) {
    PetscCall(MatDestroy(A + (i ? 3 : 0)));
    PetscCall(MatDestroy(aux + i));
    PetscCall(ISDestroy(is + i));
  }
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode MatAndISLoad(const char *prefix, const char *identifier, Mat A, IS is, Mat aux, PetscMPIInt size)
{
  Mat tmp[3];
  IS sizes;
  const PetscInt *idx;
  PetscInt m;
  PetscLayout map;
  PetscViewer viewer;
  char name[PETSC_MAX_PATH_LEN];

  PetscFunctionBeginUser;
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_sizes_%d.dat", prefix, identifier, size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
  PetscCall(ISLoad(sizes, viewer));
  PetscCall(ISSetBlockSize(sizes, is && aux ? 5 : 4)); /* not mandatory but useful to check for proper sizes */
  PetscCall(ISGetIndices(sizes, &idx));
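  /* the sizes file stores, per process, the local and global dimensions of the distributed block
     (idx[0] through idx[3]) and, when an auxiliary matrix is requested, its local size (idx[4]) */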
  PetscCall(MatSetSizes(A, idx[0], idx[1], idx[2], idx[3]));
  if (is && aux) {
    PetscCall(MatCreate(PETSC_COMM_WORLD, tmp));
    PetscCall(MatSetSizes(tmp[0], idx[4], idx[4], PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSetUp(tmp[0]));
  }
  PetscCall(ISRestoreIndices(sizes, &idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s.dat", prefix, identifier));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  if (is && aux) {
    PetscCall(ISCreate(PETSC_COMM_WORLD, &sizes));
    PetscCall(MatGetLayouts(tmp[0], &map, NULL));
    PetscCall(ISSetLayout(sizes, map));
    PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_is_%d.dat", prefix, identifier, size));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
    PetscCall(ISLoad(sizes, viewer));
    PetscCall(ISGetLocalSize(sizes, &m));
    PetscCall(ISGetIndices(sizes, &idx));
    PetscCall(ISSetType(is, ISGENERAL));
    PetscCall(ISGeneralSetIndices(is, m, idx, PETSC_COPY_VALUES));
    PetscCall(ISRestoreIndices(sizes, &idx));
    PetscCall(ISDestroy(&sizes));
    PetscCall(PetscViewerDestroy(&viewer));
    PetscCall(PetscSNPrintf(name, sizeof(name), "%s%s_aux_%d.dat", prefix, identifier, size));
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, &viewer));
    PetscCall(MatLoad(tmp[0], viewer));
    PetscCall(PetscViewerDestroy(&viewer));
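    /* extract the process-local diagonal block of the distributed auxiliary matrix, duplicate it, and
       swap it into the caller-supplied sequential Mat through MatHeaderReplace() */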
    PetscCall(MatGetDiagonalBlock(tmp[0], tmp + 1));
    PetscCall(MatDuplicate(tmp[1], MAT_COPY_VALUES, tmp + 2));
    PetscCall(MatHeaderReplace(aux, tmp + 2));
    PetscCall(MatDestroy(tmp));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   testset:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_has_neumann -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2
      test:
         requires: mumps
         suffix: 1
         args: -viewer -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1
         filter: grep -v -e "action of " -e " " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e " rows="
      test:
         requires: mumps
         suffix: 2
         output_file: output/ex87_1_system-stokes.out
         args: -viewer -system stokes -empty_A11 -transpose {{false true}shared output} -permute {{false true}shared output} -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_sub_mat_mumps_icntl_26 1 -explicit {{false true}shared output}
         filter: grep -v -e "action of " -e " " -e "block size" -e "total: nonzeros=" -e "using I-node" -e "aij" -e "transpose" -e "diagonal" -e "total number of" -e " rows=" | sed -e "s/ right preconditioning/ left preconditioning/g" -e "s/ using UNPRECONDITIONED/ using PRECONDITIONED/g"
      test:
         suffix: 1_petsc
         args: -system {{elasticity stokes}separate output} -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.3 -permute
      test:
         suffix: 2_petsc
         output_file: output/ex87_1_petsc_system-stokes.out
         args: -system stokes -empty_A11 -transpose -fieldsplit_1_pc_hpddm_ksp_pc_side right -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_coarse_mat_type baij -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.3 -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_shift_type inblocks -successive_solves
         filter: sed -e "s/type: transpose/type: hermitiantranspose/g"
      test:
         suffix: threshold
         requires: !defined(PETSC_HAVE_MKL_SPARSE_SP2M_FEATURE)
         output_file: output/ex87_1_petsc_system-elasticity.out
         args: -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.2 -fieldsplit_1_pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -successive_solves

   testset:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_monitor -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_pc_hpddm_coarse_pc_type redundant -fieldsplit_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -fieldsplit_1_pc_hpddm_coarse_correction balanced -fieldsplit_1_pc_hpddm_levels_1_eps_gen_non_hermitian -fieldsplit_1_pc_hpddm_coarse_p 2 -system stokes -fieldsplit_1_pc_hpddm_ksp_pc_side left -fieldsplit_1_pc_hpddm_levels_1_sub_pc_factor_mat_solver_type petsc -fieldsplit_1_pc_hpddm_levels_1_eps_threshold_absolute 0.3
      test:
         suffix: diagonal
         output_file: output/ex87_1_petsc_system-stokes.out
         args: -fieldsplit_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_has_neumann -diagonal_A11 {{false true}shared output}
      test:
         suffix: harmonic_overlap_2
         output_file: output/ex87_1_petsc_system-stokes.out
         args: -fieldsplit_0_pc_hpddm_harmonic_overlap 2 -fieldsplit_0_pc_hpddm_levels_1_svd_nsv 20 -diagonal_A11 -permute {{false true}shared output} -all_transpose

   test:
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) !hpddm !memkind
      nsize: 4
      suffix: selfp
      output_file: output/empty.out
      filter: grep -v "CONVERGED_RTOL iterations"
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system stokes -ksp_rtol 1e-4 -ksp_converged_reason -ksp_max_it 30 -pc_type fieldsplit -pc_fieldsplit_type schur -fieldsplit_ksp_type preonly -pc_fieldsplit_schur_precondition selfp -fieldsplit_pc_type bjacobi -fieldsplit_sub_pc_type lu -transpose {{false true}shared output} -fieldsplit_1_mat_schur_complement_ainv_type lump

   test:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 4
      suffix: nonsymmetric_least_squares
      output_file: output/empty.out
      filter: grep -v "CONVERGED_RTOL iterations"
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -system diffusion -ksp_rtol 1e-4 -ksp_converged_reason -ksp_max_it 20 -pc_type fieldsplit -pc_fieldsplit_type schur -fieldsplit_ksp_type preonly -fieldsplit_0_pc_type pbjacobi -prefix_push fieldsplit_1_ -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -prefix_push pc_hpddm_levels_1_ -sub_pc_type lu -sub_pc_factor_shift_type nonzero -eps_nev 5 -eps_gen_non_hermitian -st_share_sub_ksp -prefix_pop -prefix_pop -fieldsplit_1_mat_schur_complement_ainv_type {{diag blockdiag}shared output}

   test:
      requires: datafilespath hpddm slepc double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      nsize: 2
      suffix: lagrange
      output_file: output/empty.out
      filter: grep -v "CONVERGED_RTOL iterations"
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_rtol 1e-4 -fieldsplit_ksp_max_it 100 -fieldsplit_0_pc_hpddm_has_neumann -fieldsplit_0_pc_hpddm_levels_1_eps_nev 10 -fieldsplit_0_pc_hpddm_levels_1_st_share_sub_ksp -fieldsplit_0_pc_hpddm_define_subdomains -fieldsplit_1_pc_hpddm_schur_precondition geneo -fieldsplit_0_pc_hpddm_coarse_pc_type redundant -fieldsplit_0_pc_hpddm_coarse_redundant_pc_type cholesky -fieldsplit_0_pc_hpddm_levels_1_sub_pc_type lu -fieldsplit_ksp_type fgmres -ksp_type fgmres -ksp_max_it 10 -system lagrange -transpose {{false true}shared output} -successive_solves

   test:
      requires: datafilespath mumps double !complex !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      suffix: mumps
      output_file: output/empty.out
      args: -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -ksp_type preonly -system elasticity -pc_type cholesky -mat_mumps_icntl_15 1

TEST*/