1 #include <petscksp.h>
2
3 static char help[] = "Demonstrate PCFIELDSPLIT after MatZeroRowsColumns() inside PCREDISTRIBUTE";
4
main(int argc,char ** argv)5 int main(int argc, char **argv)
6 {
7 PetscMPIInt rank, size;
8 Mat A;
9 IS field0, field1, zeroedrows;
10 PetscInt row;
11 KSP ksp, kspred;
12 PC pc;
13 Vec x, b;
14
15 PetscCall(PetscInitialize(&argc, &argv, NULL, help));
16 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
17 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
18 PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must be run with 2 MPI processes");
19
20 // Set up a small problem with 2 dofs on rank 0 and 4 on rank 1
21 PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
22 PetscCall(MatSetSizes(A, !rank ? 2 : 4, !rank ? 2 : 4, PETSC_DETERMINE, PETSC_DETERMINE));
23 PetscCall(MatSetFromOptions(A));
24 if (rank == 0) {
25 PetscCall(MatSetValue(A, 0, 0, 2.0, INSERT_VALUES));
26 PetscCall(MatSetValue(A, 0, 1, -1.0, INSERT_VALUES));
27 PetscCall(MatSetValue(A, 1, 1, 3.0, INSERT_VALUES));
28 PetscCall(MatSetValue(A, 1, 2, -1.0, INSERT_VALUES));
29 } else if (rank == 1) {
30 PetscCall(MatSetValue(A, 2, 2, 4.0, INSERT_VALUES));
31 PetscCall(MatSetValue(A, 2, 3, -1.0, INSERT_VALUES));
32 PetscCall(MatSetValue(A, 3, 3, 5.0, INSERT_VALUES));
33 PetscCall(MatSetValue(A, 3, 4, -1.0, INSERT_VALUES));
34 PetscCall(MatSetValue(A, 4, 4, 6.0, INSERT_VALUES));
35 PetscCall(MatSetValue(A, 4, 5, -1.0, INSERT_VALUES));
36 PetscCall(MatSetValue(A, 5, 5, 7.0, INSERT_VALUES));
37 PetscCall(MatSetValue(A, 5, 4, -0.5, INSERT_VALUES));
38 }
39 PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
40 PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
41 PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
42
43 PetscCall(MatCreateVecs(A, &b, &x));
44 PetscCall(VecSet(b, 1.0));
45
46 // the two fields for PCFIELDSPLIT are initially (0,2,4) and (1,3,5)
47 PetscCall(ISCreateStride(PETSC_COMM_WORLD, !rank ? 1 : 2, !rank ? 0 : 2, 2, &field0));
48 PetscCall(ISView(field0, PETSC_VIEWER_STDOUT_WORLD));
49 PetscCall(ISCreateStride(PETSC_COMM_WORLD, !rank ? 1 : 2, !rank ? 1 : 3, 2, &field1));
50 PetscCall(ISView(field1, PETSC_VIEWER_STDOUT_WORLD));
51
52 // these rows are being zeroed (0,3)
53 row = (!rank ? 0 : 3);
54 PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 1, &row, PETSC_COPY_VALUES, &zeroedrows));
55 PetscCall(ISView(zeroedrows, PETSC_VIEWER_STDOUT_WORLD));
56
57 PetscCall(MatZeroRowsColumnsIS(A, zeroedrows, 1.0, NULL, NULL));
58 PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
59
60 PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
61 PetscCall(KSPSetOperators(ksp, A, A));
62 PetscCall(KSPGetPC(ksp, &pc));
63 PetscCall(PCSetType(pc, PCREDISTRIBUTE));
64 /* note that one provides the indices for the fields on the original full system, not on the reduced system PCREDISTRIBUTE solves */
65 PetscCall(PCFieldSplitSetIS(pc, NULL, field0));
66 PetscCall(PCFieldSplitSetIS(pc, NULL, field1));
67 PetscCall(KSPSetFromOptions(ksp));
68
69 PetscCall(KSPSolve(ksp, b, x));
70
71 PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
72 PetscCall(PCRedistributeGetKSP(pc, &kspred));
73 PetscCall(KSPSetInitialGuessNonzero(kspred, PETSC_TRUE));
74 PetscCall(KSPSolve(ksp, b, x));
75 PetscCall(PetscOptionsClearValue(NULL, "-ksp_view"));
76 if (rank == 0) PetscCall(MatSetValue(A, 1, 2, 0.0, INSERT_VALUES));
77 PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
78 PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
79 PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, PETSC_TRUE));
80 PetscCall(KSPSetFromOptions(ksp));
81 PetscCall(KSPSolveTranspose(ksp, b, x));
82 PetscCall(KSPDestroy(&ksp));
83 PetscCall(VecDestroy(&b));
84 PetscCall(VecDestroy(&x));
85 PetscCall(MatDestroy(&A));
86 PetscCall(ISDestroy(&field0));
87 PetscCall(ISDestroy(&field1));
88 PetscCall(ISDestroy(&zeroedrows));
89 PetscCall(PetscFinalize());
90 return 0;
91 }
92
/*TEST

   test:
     nsize: 2
     args: -ksp_monitor -redistribute_ksp_monitor -ksp_view -redistribute_pc_type fieldsplit -ksp_type preonly

TEST*/
100