static char help[] = "Reads a PETSc matrix and vector from a file and solves a least-squares problem, either directly (KSPLSQR by default), via the normal equations, or via an augmented system.\n\n";
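/*
  Example runs (drawn from the TEST block at the bottom of this file):

    ./ex27 -f ${DATAFILESPATH}/matrices/medium -solve_normal
    mpiexec -n 2 ./ex27 -f ${DATAFILESPATH}/matrices/rectangular_ultrasound_4889x841 -ksp_type lsqr
*/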

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
    petscsys.h    - base PETSc routines    petscvec.h - vectors
    petscmat.h    - matrices
    petscis.h     - index sets             petscksp.h - Krylov subspace methods
    petscviewer.h - viewers                petscpc.h  - preconditioners
*/
#include <petscksp.h>
#include <petscviewerhdf5.h>

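/*
  Try to load vector b from viewer fd and report through *has whether it was
  found.  For binary viewers, a return-error handler is pushed around
  VecLoad() so that a missing vector sets *has = PETSC_FALSE instead of
  aborting the program.
*/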
static PetscErrorCode VecLoadIfExists_Private(Vec b, PetscViewer fd, PetscBool *has)
{
  PetscBool hdf5 = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectTypeCompare((PetscObject)fd, PETSCVIEWERHDF5, &hdf5));
  if (hdf5) {
#if defined(PETSC_HAVE_HDF5)
    PetscCall(PetscViewerHDF5HasObject(fd, (PetscObject)b, has));
    if (*has) PetscCall(VecLoad(b, fd));
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "PETSc must be configured with HDF5 to use this feature");
#endif
  } else {
    PetscErrorCode ierrp;
    PetscCall(PetscPushErrorHandler(PetscReturnErrorHandler, NULL));
    ierrp = VecLoad(b, fd);
    PetscCall(PetscPopErrorHandler());
    *has = ierrp ? PETSC_FALSE : PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  KSP         ksp;                          /* linear solver context */
  Mat         A, N;                         /* matrix and normal-equations operator */
  Vec         x, b, r, Ab, v[2];            /* approx solution, RHS, residual, A^H b, augmented vectors */
  PetscViewer fd;                           /* viewer */
  char        file[PETSC_MAX_PATH_LEN]    = ""; /* input file name */
  char        file_x0[PETSC_MAX_PATH_LEN] = ""; /* name of input file with initial guess */
  char        A_name[128] = "A", b_name[128] = "b", x0_name[128] = "x0"; /* names of the matrix, RHS, and initial guess */
  KSPType     ksptype;
  PetscBool   has;
  PetscInt    its, n, m;
  PetscReal   norm;
  PetscBool   nonzero_guess      = PETSC_TRUE;
  PetscBool   solve_normal       = PETSC_FALSE;
  PetscBool   solve_augmented    = PETSC_FALSE;
  PetscBool   truncate           = PETSC_FALSE;
  PetscBool   explicit_transpose = PETSC_FALSE;
  PetscBool   hdf5               = PETSC_FALSE;
  PetscBool   test_custom_layout = PETSC_FALSE;
  PetscBool   sbaij              = PETSC_FALSE;
  PetscMPIInt rank, size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  /*
    Determine the files from which we read the linear system
    (matrix, right-hand side, and initial guess vector).
  */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-truncate", &truncate, NULL));
  if (!truncate) PetscCall(PetscOptionsGetString(NULL, NULL, "-f_x0", file_x0, sizeof(file_x0), NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-A_name", A_name, sizeof(A_name), NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-b_name", b_name, sizeof(b_name), NULL));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-x0_name", x0_name, sizeof(x0_name), NULL));
  /*
    Decide whether to solve the original system (-solve_normal 0)
    or the normal equations (-solve_normal 1).
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-solve_normal", &solve_normal, NULL));
  if (!solve_normal) PetscCall(PetscOptionsGetBool(NULL, NULL, "-solve_augmented", &solve_augmented, NULL));
  if (solve_augmented) {
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-explicit_transpose", &explicit_transpose, NULL));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-sbaij", &sbaij, NULL));
  }
  /*
    Decide whether to use the HDF5 reader.
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-hdf5", &hdf5, NULL));
  /*
    Decide whether a custom matrix layout will be tested.
  */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_custom_layout", &test_custom_layout, NULL));

  /* -----------------------------------------------------------
                  Beginning of linear solver loop
     ----------------------------------------------------------- */
  /*
    Loop through the linear solve two times.
    - The intention here is to preload and solve a small system;
      then load another (larger) system and solve it as well.
      This process preloads the instructions with the smaller
      system so that more accurate performance monitoring (via
      -log_view) can be done with the larger one (that actually
      is the system of interest).
    - Since preloading is disabled by default here (PETSC_FALSE),
      the loop runs only once unless -preload is given at runtime.
  */
  PetscPreLoadBegin(PETSC_FALSE, "Load system");

  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                           Load system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Open the binary file.  Note that we use FILE_MODE_READ to indicate
    reading from this file.
  */
  if (hdf5) {
#if defined(PETSC_HAVE_HDF5)
    PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
    PetscCall(PetscViewerPushFormat(fd, PETSC_VIEWER_HDF5_MAT));
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "PETSc must be configured with HDF5 to use this feature");
#endif
  } else {
    PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  }

  /*
    Load the matrix.
    The matrix type is set automatically, but you can override it with MatSetType() prior to MatLoad().
    Do that only if you really insist on the given type.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscObjectSetName((PetscObject)A, A_name));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, fd));
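  /*
    With -truncate, replace A with A*P, where P is a rectangular matrix acting
    on roughly the first two thirds of the columns of A, so that the system to
    be solved becomes overdetermined.
  */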
  if (truncate) {
    Mat      P, B;
    PetscInt M, N;
    PetscCall(MatGetLocalSize(A, &m, &n));
    PetscCall(MatGetSize(A, &M, &N));
    PetscCall(MatCreateFromOptions(PETSC_COMM_WORLD, NULL, 1, m, PETSC_DECIDE, M, N / 1.5, &P));
    PetscCall(MatGetOwnershipRangeColumn(P, &m, &n));
    for (; m < n; ++m) PetscCall(MatSetValue(P, m, m, 1.0, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(P, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(P, MAT_FINAL_ASSEMBLY));
    PetscCall(MatShift(P, 1.0));
    PetscCall(MatMatMult(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &B));
    PetscCall(MatDestroy(&P));
    PetscCall(MatDestroy(&A));
    A = B;
  }
  if (test_custom_layout && size > 1) {
    /* Perturb the local sizes and create the matrix anew */
    PetscInt m1, n1;
    PetscCall(MatGetLocalSize(A, &m, &n));
    m = rank ? m - 1 : m + size - 1;
    n = (rank == size - 1) ? n + size - 1 : n - 1;
    PetscCall(MatDestroy(&A));
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(PetscObjectSetName((PetscObject)A, A_name));
    PetscCall(MatSetSizes(A, m, n, PETSC_DECIDE, PETSC_DECIDE));
    PetscCall(MatSetFromOptions(A));
    PetscCall(MatLoad(A, fd));
    PetscCall(MatGetLocalSize(A, &m1, &n1));
    PetscCheck(m1 == m && n1 == n, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "resulting sizes differ from requested ones: %" PetscInt_FMT " %" PetscInt_FMT " != %" PetscInt_FMT " %" PetscInt_FMT, m1, n1, m, n);
  }
  PetscCall(MatGetLocalSize(A, &m, &n));

  /*
    Load the RHS vector if it is present in the file, otherwise use a vector of all ones.
  */
  PetscCall(MatCreateVecs(A, &x, &b));
  PetscCall(PetscObjectSetName((PetscObject)b, b_name));
  PetscCall(VecSetFromOptions(b));
  PetscCall(VecLoadIfExists_Private(b, fd, &has));
  if (!has) {
    PetscScalar one = 1.0;
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Failed to load RHS, so use a vector of all ones.\n"));
    PetscCall(VecSetFromOptions(b));
    PetscCall(VecSet(b, one));
  }

  /*
    Load the initial guess vector if it is present in the file, otherwise use a vector of all zeros.
  */
  PetscCall(PetscObjectSetName((PetscObject)x, x0_name));
  PetscCall(VecSetFromOptions(x));
  if (!truncate) {
    /* load file_x0 if it is specified, otherwise try to reuse file */
    if (file_x0[0]) {
      PetscCall(PetscViewerDestroy(&fd));
      if (hdf5) {
#if defined(PETSC_HAVE_HDF5)
        PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, file_x0, FILE_MODE_READ, &fd));
#endif
      } else {
        PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file_x0, FILE_MODE_READ, &fd));
      }
    }
    PetscCall(VecLoadIfExists_Private(x, fd, &has));
  } else has = PETSC_FALSE;
  if (truncate || !has) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Failed to load initial guess, so use a vector of all zeros.\n"));
    PetscCall(VecSet(x, 0.0));
    nonzero_guess = PETSC_FALSE;
  }
  PetscCall(PetscViewerDestroy(&fd));

  PetscCall(VecDuplicate(x, &Ab));

  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
                     Setup solve for system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Conclude profiling last stage; begin profiling next stage.
  */
  PetscPreLoadStage("KSPSetUp");

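  /*
    Form the Hermitian normal-equations operator N = A^H A (applied matrix-free
    by MatCreateNormalHermitian()) and the right-hand side Ab = A^H b, so the
    normal equations N x = A^H b can be solved instead of A x = b.
  */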
  PetscCall(MatCreateNormalHermitian(A, &N));
  PetscCall(MatMultHermitianTranspose(A, b, Ab));

  /*
    Create linear solver; set operators; set runtime options.
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  if (solve_normal) {
    PetscCall(KSPSetOperators(ksp, N, N));
  } else if (solve_augmented) {
    Mat       array[4], C, S;
    Vec       view;
    PetscInt  M, n;
    PetscReal diag;

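    /*
      Assemble the equivalent augmented saddle-point system

        [ -I    A  ] [ u ]   [ b ]
        [ A^H  t*I ] [ x ] = [ 0 ]

      as a 2x2 MATNEST C; eliminating u = A x - b recovers the normal
      equations A^H (A x - b) + t x = 0, with t = 0 unless -nonzero_A11
      is given.
    */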
    PetscCall(MatDestroy(&N));
    PetscCall(MatGetSize(A, &M, NULL));
    PetscCall(MatGetLocalSize(A, NULL, &n));
    PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, m, m, M, M, -1.0, array));
    array[1] = A;
    if (!explicit_transpose) PetscCall(MatCreateHermitianTranspose(A, array + 2));
    else PetscCall(MatHermitianTranspose(A, MAT_INITIAL_MATRIX, array + 2));
    PetscCall(PetscOptionsGetReal(NULL, NULL, "-nonzero_A11", &diag, &has));
    if (has) PetscCall(MatCreateConstantDiagonal(PETSC_COMM_WORLD, n, n, PETSC_DECIDE, PETSC_DECIDE, diag, array + 3));
    else array[3] = NULL;
    PetscCall(MatCreateNest(PETSC_COMM_WORLD, 2, NULL, 2, NULL, array, &C));
    if (!sbaij) PetscCall(MatNestSetVecType(C, VECNEST));
    PetscCall(MatCreateVecs(C, v + 1, v));
    PetscCall(VecSet(v[0], 0.0));
    PetscCall(VecSet(v[1], 0.0));
    if (!sbaij) {
      PetscCall(VecNestGetSubVec(v[0], 0, &view));
      PetscCall(VecCopy(b, view));
      PetscCall(VecNestGetSubVec(v[1], 1, &view));
      PetscCall(VecCopy(x, view));
      PetscCall(KSPSetOperators(ksp, C, C));
    } else {
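      /*
        The MATSBAIJ conversion below uses contiguous vectors instead of
        VECNEST, so copy b and the initial guess x into the two blocks of
        v[0] and v[1] by hand.
      */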
      const PetscScalar *read;
      PetscScalar       *write;
      PetscCall(VecGetArrayRead(b, &read));
      PetscCall(VecGetArrayWrite(v[0], &write));
      for (PetscInt i = 0; i < m; ++i) write[i] = read[i];
      PetscCall(VecRestoreArrayWrite(v[0], &write));
      PetscCall(VecRestoreArrayRead(b, &read));
      PetscCall(VecGetArrayRead(x, &read));
      PetscCall(VecGetArrayWrite(v[1], &write));
      for (PetscInt i = 0; i < n; ++i) write[m + i] = read[i];
      PetscCall(VecRestoreArrayWrite(v[1], &write));
      PetscCall(VecRestoreArrayRead(x, &read));
      PetscCall(MatConvert(C, MATSBAIJ, MAT_INITIAL_MATRIX, &S));
      PetscCall(KSPSetOperators(ksp, S, S));
      PetscCall(MatDestroy(&S));
    }
    PetscCall(MatDestroy(&C));
    PetscCall(MatDestroy(array));
    PetscCall(MatDestroy(array + 2));
    PetscCall(MatDestroy(array + 3));
  } else {
    PC pc;
    PetscCall(KSPSetType(ksp, KSPLSQR));
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCNONE));
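    /* with KSPLSQR, the operator is A itself while the normal-equations matrix N serves as the matrix from which the preconditioner is built */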
    PetscCall(KSPSetOperators(ksp, A, N));
  }
  PetscCall(KSPSetInitialGuessNonzero(ksp, nonzero_guess));
  PetscCall(KSPSetFromOptions(ksp));

  /*
    Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
    enable more precise profiling of setting up the preconditioner.
    These calls are optional, since both will be called within
    KSPSolve() if they haven't been called already.
  */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSetUpOnBlocks(ksp));

  /*
                           Solve system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Begin profiling next stage
  */
  PetscPreLoadStage("KSPSolve");

  /*
    Solve linear system
  */
  if (solve_normal) {
    PetscCall(KSPSolve(ksp, Ab, x));
  } else if (solve_augmented) {
    KSP      *subksp;
    PC        pc;
    Mat       C;
    Vec       view;
    PetscBool flg;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &flg));
    if (flg) {
      PetscCall(KSPGetOperators(ksp, &C, NULL));
      PetscCall(PCFieldSplitGetSubKSP(pc, NULL, &subksp));
      PetscCall(KSPGetPC(subksp[1], &pc));
      PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCHPDDM, &flg));
      if (flg) {
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
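        /*
          Hand the (1,1) sub-KSP an explicit Schur complement
          S = A11 - A10 * inv(A00) * A01 built from the blocks of the nest
          matrix, so that PCHPDDM can exercise its least_squares
          Schur-complement preconditioning code path.
        */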
        Mat aux, S, **array;
        IS  is;

        PetscCall(MatCreate(PETSC_COMM_SELF, &aux));
        PetscCall(ISCreate(PETSC_COMM_SELF, &is));
        PetscCall(PCHPDDMSetAuxiliaryMat(pc, is, aux, NULL, NULL)); /* dummy objects just to cover corner cases in PCSetUp() */
        PetscCall(ISDestroy(&is));
        PetscCall(MatDestroy(&aux));
        PetscCall(MatNestGetSubMats(C, NULL, NULL, &array));
        PetscCall(MatCreateSchurComplement(array[0][0], array[0][0], array[0][1], array[1][0], array[1][1], &S));
        PetscCall(MatSetOptionsPrefix(S, "fieldsplit_1_"));
        PetscCall(KSPSetOperators(subksp[1], S, S));
        PetscCall(MatDestroy(&S));
        PetscCall(PCSetFromOptions(pc));
#endif
      }
      PetscCall(PetscFree(subksp));
    }
    PetscCall(KSPSolve(ksp, v[0], v[1]));
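    /* extract the solution x, i.e., the second block of the augmented solution vector v[1] */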
    if (!sbaij) {
      PetscCall(VecNestGetSubVec(v[1], 1, &view));
      PetscCall(VecCopy(view, x));
    } else {
      const PetscScalar *read;
      PetscScalar       *write;
      PetscCall(MatGetLocalSize(A, &m, &n));
      PetscCall(VecGetArrayRead(v[1], &read));
      PetscCall(VecGetArrayWrite(x, &write));
      for (PetscInt i = 0; i < n; ++i) write[i] = read[m + i];
      PetscCall(VecRestoreArrayWrite(x, &write));
      PetscCall(VecRestoreArrayRead(v[1], &read));
    }
  } else {
    PetscCall(KSPSolve(ksp, b, x));
  }
  PetscCall(PetscObjectSetName((PetscObject)x, "x"));

  /*
    Conclude profiling this stage
  */
  PetscPreLoadStage("Cleanup");

  /* - - - - - - - - - - - New Stage - - - - - - - - - - - - -
        Check error, print output, free data structures.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
    Check error: compute the true residual norm ||A x - b||
  */
  PetscCall(VecDuplicate(b, &r));
  PetscCall(MatMult(A, x, r));
  PetscCall(VecAXPY(r, -1.0, b));
  PetscCall(VecNorm(r, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(KSPGetType(ksp, &ksptype));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "KSP type: %s\n", ksptype));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %3" PetscInt_FMT "\n", its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Residual norm %g\n", (double)norm));

  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&N));
  PetscCall(VecDestroy(&Ab));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  if (solve_augmented) {
    PetscCall(VecDestroy(v));
    PetscCall(VecDestroy(v + 1));
  }
  PetscCall(KSPDestroy(&ksp));
  PetscPreLoadEnd();
  /* -----------------------------------------------------------
                     End of linear solver loop
     ----------------------------------------------------------- */

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: 1
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/medium -ksp_view -ksp_monitor_short -ksp_max_it 100 -solve_normal

   test:
      suffix: 2
      nsize: 2
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -ksp_view -ksp_monitor_short -ksp_max_it 100 -solve_normal -pc_type none

   # Test handling of a failing VecLoad without aborting
   testset:
      requires: double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -ksp_type cg -ksp_view -ksp_converged_reason -ksp_monitor_short -ksp_max_it 10
      test:
         suffix: 3
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system
         args: -f_x0 ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system_x0
      test:
         suffix: 3a
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system
         args: -f_x0 NONEXISTING_FILE
      test:
         suffix: 3b
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system_with_x0 # this file contains A, b, and x0
      test:
         # Load square matrix, RHS and initial guess from HDF5 (Version 7.3 MAT-File)
         suffix: 3b_hdf5
         requires: hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
         nsize: {{1 2}separate output}
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/tiny_system_with_x0.mat -hdf5

   # Test least-squares algorithms
   testset:
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/rectangular_ultrasound_4889x841
      test:
         suffix: 4
         nsize: {{1 2 4}}
         args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
         args: -solve_normal -ksp_type cg
      test:
         suffix: 4a
         nsize: {{1 2 4}}
         args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
         args: -ksp_type {{cgls lsqr}separate output}
      test:
         # Test KSPLSQR-specific options
         suffix: 4b
         nsize: 2
         args: -ksp_converged_reason -ksp_rtol 1e-3 -ksp_max_it 200 -ksp_view
         args: -ksp_type lsqr -ksp_convergence_test lsqr -ksp_lsqr_monitor -ksp_lsqr_compute_standard_error -ksp_lsqr_exact_mat_norm {{0 1}separate output}
      test:
         suffix: 4c
         nsize: 4
         requires: hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: grep -v "shared subdomain KSP between SLEPc and PETSc" | grep -v "total: nonzeros="
         args: -ksp_converged_reason -ksp_rtol 1e-5 -ksp_max_it 100 -ksp_view
         args: -ksp_type lsqr -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output}
         args: -pc_hpddm_levels_1_pc_asm_sub_mat_type aij -pc_hpddm_levels_1_pc_asm_type basic -pc_hpddm_levels_1_sub_pc_type cholesky
      test:
         suffix: 4d
         nsize: 4
         requires: hpddm slepc suitesparse defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: grep -v "shared subdomain KSP between SLEPc and PETSc"
         args: -ksp_converged_reason -ksp_rtol 1e-5 -ksp_max_it 100 -ksp_view
         args: -ksp_type lsqr -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp {{false true}shared output} -pc_hpddm_levels_1_st_pc_type qr
         args: -pc_hpddm_levels_1_pc_asm_sub_mat_type normalh -pc_hpddm_levels_1_pc_asm_type basic -pc_hpddm_levels_1_sub_pc_type qr
      test:
         suffix: 4e
         nsize: 4
         requires: hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         args: -solve_augmented -ksp_type gmres
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type cholesky -prefix_pop -fieldsplit_1_mat_schur_complement_ainv_type {{lump blockdiag}shared output}
      test:
         suffix: 4f
         nsize: 4
         requires: hpddm slepc suitesparse defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         filter: sed -e "s/(1,0) : type=mpiaij/(1,0) : type=transpose/g" -e "s/hermitiantranspose/transpose/g"
         args: -solve_augmented -ksp_type gmres -ksp_view -explicit_transpose {{false true}shared output}
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type qr -prefix_pop
      test:
         suffix: 4f_nonzero
         nsize: 4
         requires: hpddm slepc suitesparse defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         args: -solve_augmented -nonzero_A11 {{0.0 1e-14}shared output} -ksp_type gmres
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type qr -prefix_pop
      test:
         suffix: 4f_nonzero_shift
         nsize: 4
         output_file: output/ex27_4f_nonzero.out
         requires: hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
         args: -solve_augmented -nonzero_A11 {{0.0 1e-6}shared output} -ksp_type gmres
         args: -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -fieldsplit_0_pc_type jacobi -fieldsplit_ksp_type preonly
         args: -prefix_push fieldsplit_1_ -pc_type hpddm -pc_hpddm_schur_precondition least_squares -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_st_share_sub_ksp -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_gen_non_hermitian -prefix_pop
      test:
         suffix: 4g
         nsize: 4
         requires: hypre !defined(PETSC_HAVE_HYPRE_DEVICE)
         args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
         args: -ksp_type lsqr -pc_type hypre
      test:
         suffix: 4h
         nsize: {{1 4}}
         args: -solve_augmented -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_precondition self -pc_fieldsplit_detect_saddle_point -sbaij true -ksp_type fgmres

   test:
      # Load rectangular matrix from HDF5 (Version 7.3 MAT-File)
      suffix: 4a_lsqr_hdf5
      nsize: {{1 2 4 8}}
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
      args: -f ${DATAFILESPATH}/matrices/matlab/rectangular_ultrasound_4889x841.mat -hdf5
      args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 100
      args: -ksp_type lsqr
      args: -test_custom_layout {{0 1}}

   # Test for correct cgls convergence reason
   test:
      suffix: 5
      nsize: 1
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/rectangular_ultrasound_4889x841
      args: -ksp_converged_reason -ksp_rtol 1e-2 -ksp_max_it 100
      args: -ksp_type cgls

   # Load a matrix, RHS and solution from HDF5 (Version 7.3 MAT-File). Test immediate convergence.
   testset:
      nsize: {{1 2 4 8}}
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
      args: -ksp_converged_reason -ksp_monitor_short -ksp_rtol 1e-5 -ksp_max_it 10
      args: -ksp_type lsqr
      args: -test_custom_layout {{0 1}}
      args: -hdf5 -x0_name x
      test:
         suffix: 6_hdf5
         args: -f ${DATAFILESPATH}/matrices/matlab/small.mat
      test:
         suffix: 6_hdf5_rect
         args: -f ${DATAFILESPATH}/matrices/matlab/small_rect.mat
      test:
         suffix: 6_hdf5_dense
         args: -f ${DATAFILESPATH}/matrices/matlab/small_dense.mat -mat_type dense
      test:
         suffix: 6_hdf5_rect_dense
         args: -f ${DATAFILESPATH}/matrices/matlab/small_rect_dense.mat -mat_type dense

   # Test correct handling of local dimensions in PCApply
   testset:
      requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
      requires: hdf5 defined(PETSC_HDF5_HAVE_ZLIB)
      nsize: 3
      suffix: 7
      args: -f ${DATAFILESPATH}/matrices/matlab/small.mat -hdf5 -test_custom_layout 1 -ksp_type lsqr -pc_type jacobi

   # Test complex matrices
   testset:
      requires: double complex !defined(PETSC_USE_64BIT_INDICES)
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/nh-complex-int32-float64
      output_file: output/ex27_8.out
      filter: grep -v "KSP type"
      test:
         suffix: 8
         args: -solve_normal 0 -ksp_type {{lsqr cgls}}
      test:
         suffix: 8_normal
         args: -solve_normal 1 -ksp_type {{cg bicg}}

   testset:
      requires: double suitesparse !defined(PETSC_USE_64BIT_INDICES)
      args: -solve_normal {{0 1}shared output} -pc_type qr
      output_file: output/ex27_9.out
      filter: grep -v "KSP type"
      test:
         suffix: 9_real
         requires: !complex
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64
      test:
         suffix: 9_complex
         requires: complex
         args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/nh-complex-int32-float64

   test:
      suffix: 10
      requires: !complex double suitesparse !defined(PETSC_USE_64BIT_INDICES)
      nsize: 2
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -pc_type bjacobi -sub_pc_type qr

   test:
      suffix: 11
      nsize: 4
      requires: datafilespath double complex !defined(PETSC_USE_64BIT_INDICES) hpddm slepc defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
      args: -f ${DATAFILESPATH}/matrices/farzad_B_rhs -truncate
      args: -ksp_converged_reason -ksp_rtol 1e-5 -ksp_max_it 100
      args: -ksp_type lsqr -pc_type hpddm -pc_hpddm_define_subdomains -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_1_eps_threshold_absolute 1e-6
      args: -pc_hpddm_levels_1_pc_asm_sub_mat_type aij -pc_hpddm_levels_1_pc_asm_type basic -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_coarse_pc_type lu
      filter: sed -e "s/ 10/ 9/g"

TEST*/