
static char help[] = "Tests PCREDISTRIBUTE on a matrix with load imbalance.\n\
                      Modified from src/ksp/ksp/tutorials/ex2.c.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -n <mesh_y>       : number of mesh points in the y direction\n\n";
/*
Example:
  mpiexec -n 8 ./ex3 -n 10000 -ksp_type cg -pc_type bjacobi -sub_pc_type icc -ksp_rtol 1.e-8 -log_view
  mpiexec -n 8 ./ex3 -n 10000 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc -redistribute_ksp_rtol 1.e-8 -log_view
*/
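
/*
  The two runs above compare a plain block Jacobi/ICC solve with the same inner solve wrapped in
  PCREDISTRIBUTE, which repartitions the matrix rows so that each process owns roughly the same
  number (and strips rows having only a diagonal entry) before applying the -redistribute_* inner
  solver. That rebalancing is what compensates for the deliberately imbalanced row layout this
  example constructs below.
*/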

#include <petscksp.h>

int main(int argc, char **args)
{
  Vec           x, b, u; /* approx solution, RHS, exact solution */
  Mat           A;       /* linear system matrix */
  KSP           ksp;     /* linear solver context */
  PetscRandom   rctx;    /* random number generator context */
  PetscReal     norm;    /* norm of solution error */
  PetscInt      i, j, Ii, J, Istart, Iend, m, n = 7, its, nloc, matdistribute = 0;
  PetscBool     flg = PETSC_FALSE;
  PetscScalar   v;
  PetscMPIInt   rank, size;
  PetscLogStage stage;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCheck(size > 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires at least 2 MPI processes!");

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-matdistribute", &matdistribute, NULL));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  switch (matdistribute) {
  case 1: /* very imbalanced process load for matrix A */
    m    = (1 + size) * size;
    nloc = (rank + 1) * n;
    if (rank == size - 1) { /* proc[size-1] stores all remaining rows */
      nloc = m * n;
      for (i = 0; i < size - 1; i++) nloc -= (i + 1) * n;
    }
    break;
  default: /* proc[0] and proc[1] get much smaller row blocks; the remaining processes all carry the same, larger load */
    if (rank == 0 || rank == 1) {
      nloc = n;
    } else {
      nloc = 10 * n; /* 10x larger load */
    }
    m = 2 + (size - 2) * 10;
    break;
  }
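  /*
     For example, with size = 4 and n = 7 the default distribution gives nloc = 7, 7, 70, 70
     (m = 2 + 2 * 10 = 22, i.e. 22 * 7 = 154 rows in total), while -matdistribute 1 gives
     m = (1 + 4) * 4 = 20 and nloc = 7, 14, 21, 98 (140 rows in total), so the last rank holds
     most of the matrix.
  */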
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, nloc, nloc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetFromOptions(A));
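  /* Preallocate at most 5 nonzeros per row (the 5-point stencil assembled below); both the MPIAIJ
     and SeqAIJ variants are called so that either matrix type chosen at runtime is covered */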
  PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));
  PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
  PetscCall(MatSetUp(A));

  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
  nloc = Iend - Istart;
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] A Istart,Iend: %" PetscInt_FMT " %" PetscInt_FMT "; nloc %" PetscInt_FMT "\n", rank, Istart, Iend, nloc));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

  /* Log the assembly under its own stage so -log_view reports it separately */
  PetscCall(PetscLogStageRegister("Assembly", &stage));
  PetscCall(PetscLogStagePush(stage));
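  /* Each global row Ii corresponds to grid point (i, j) with i = Ii / n and j = Ii % n; couple it
     to each existing neighbor with -1 and put 4 on the diagonal (the standard 5-point Laplacian) */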
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;     /* mesh row of this grid point */
    j = Ii - i * n; /* mesh column of this grid point */
    if (i > 0) { /* neighbor in the previous mesh row */
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) { /* neighbor in the next mesh row */
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) { /* neighbor in the previous mesh column */
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) { /* neighbor in the next mesh column */
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES)); /* diagonal entry */
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogStagePop());

  /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));

  /* Create parallel vectors. */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, nloc, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /* Set exact solution; then compute right-hand-side vector. */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-random_exact_sol", &flg, NULL));
  if (flg) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
    PetscCall(PetscRandomDestroy(&rctx));
  } else {
    PetscCall(VecSet(u, 1.0));
  }
  PetscCall(MatMult(A, u, b));

  /* View the exact solution vector if desired */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL));
  if (flg) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
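  /* Shrink the relative tolerance as the grid is refined (the same heuristic used in
     src/ksp/ksp/tutorials/ex2.c, from which this example is derived) */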
  PetscCall(KSPSetTolerances(ksp, 1.e-2 / ((m + 1) * (n + 1)), PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  PetscCall(KSPSetFromOptions(ksp));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

  /* Free work space. */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 8
      args: -n 100 -ksp_type cg -pc_type bjacobi -sub_pc_type icc -ksp_rtol 1.e-8

   test:
      suffix: 2
      nsize: 8
      args: -n 100 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc -redistribute_ksp_rtol 1.e-8

TEST*/