xref: /petsc/src/ksp/pc/tutorials/ex3.c (revision efbe7e8a80d07327753dbe0b33efee01e046af3f)
1 
/* Usage text printed by PETSc when the program is run with -help */
static char help[] = "Test PC redistribute on matrix with load imbalance. \n\
                      Modified from src/ksp/ksp/tutorials/ex2.c.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -n <mesh_y>       : number of mesh points\n\n";
8 /*
9 Example:
10   mpiexec -n 8 ./ex3 -n 10000 -ksp_type cg -pc_type bjacobi -sub_pc_type icc -ksp_rtol 1.e-8 -log_view
11   mpiexec -n 8 ./ex3 -n 10000 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc -redistribute_ksp_rtol 1.e-8 -log_view
12 */
13 
14 #include <petscksp.h>
15 
16 int main(int argc,char **args)
17 {
18   Vec            x,b,u;    /* approx solution, RHS, exact solution */
19   Mat            A;        /* linear system matrix */
20   KSP            ksp;      /* linear solver context */
21   PetscRandom    rctx;     /* random number generator context */
22   PetscReal      norm;     /* norm of solution error */
23   PetscInt       i,j,Ii,J,Istart,Iend,m,n = 7,its,nloc,matdistribute=0;
24   PetscErrorCode ierr;
25   PetscBool      flg = PETSC_FALSE;
26   PetscScalar    v;
27   PetscMPIInt    rank,size;
28 #if defined(PETSC_USE_LOG)
29   PetscLogStage stage;
30 #endif
31 
32   ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
33   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
34   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
35   if (size < 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This example requires at least 2 MPI processes!");
36 
37   ierr = PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);CHKERRQ(ierr);
38   ierr = PetscOptionsGetInt(NULL,NULL,"-matdistribute",&matdistribute,NULL);CHKERRQ(ierr);
39   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
40          Compute the matrix and right-hand-side vector that define
41          the linear system, Ax = b.
42      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
43   switch(matdistribute) {
44   case 1: /* very imbalanced process load for matrix A */
45     m    = (1+size)*size;
46     nloc = (rank+1)*n;
47     if (rank == size-1) { /* proc[size-1] stores all remaining rows */
48       nloc = m*n;
49       for (i=0; i<size-1; i++){
50         nloc -= (i+1)*n;
51       }
52     }
53     break;
54   default: /* proc[0] and proc[1] load much smaller row blocks, the rest processes have same loads */
55     if (rank == 0 || rank == 1) {
56       nloc = n;
57     } else {
58       nloc = 10*n; /* 10x larger load */
59     }
60     m = 2 + (size-2)*10;
61     break;
62   }
63   ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
64   ierr = MatSetSizes(A,nloc,nloc,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
65   ierr = MatSetFromOptions(A);CHKERRQ(ierr);
66   ierr = MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);CHKERRQ(ierr);
67   ierr = MatSeqAIJSetPreallocation(A,5,NULL);CHKERRQ(ierr);
68   ierr = MatSetUp(A);CHKERRQ(ierr);
69 
70   ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
71   nloc = Iend-Istart;
72   ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] A Istart,Iend: %D %D; nloc %D\n",rank,Istart,Iend,nloc);CHKERRQ(ierr);
73   ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
74 
75   ierr = PetscLogStageRegister("Assembly", &stage);CHKERRQ(ierr);
76   ierr = PetscLogStagePush(stage);CHKERRQ(ierr);
77   for (Ii=Istart; Ii<Iend; Ii++) {
78     v = -1.0; i = Ii/n; j = Ii - i*n;
79     if (i>0)   {J = Ii - n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
80     if (i<m-1) {J = Ii + n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
81     if (j>0)   {J = Ii - 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
82     if (j<n-1) {J = Ii + 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
83     v = 4.0; ierr = MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);CHKERRQ(ierr);
84   }
85   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
86   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
87   ierr = PetscLogStagePop();CHKERRQ(ierr);
88 
89   /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
90   ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
91 
92   /* Create parallel vectors. */
93   ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
94   ierr = VecSetSizes(u,nloc,PETSC_DECIDE);CHKERRQ(ierr);
95   ierr = VecSetFromOptions(u);CHKERRQ(ierr);
96   ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
97   ierr = VecDuplicate(b,&x);CHKERRQ(ierr);
98 
99   /* Set exact solution; then compute right-hand-side vector. */
100   ierr = PetscOptionsGetBool(NULL,NULL,"-random_exact_sol",&flg,NULL);CHKERRQ(ierr);
101   if (flg) {
102     ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rctx);CHKERRQ(ierr);
103     ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
104     ierr = VecSetRandom(u,rctx);CHKERRQ(ierr);
105     ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr);
106   } else {
107     ierr = VecSet(u,1.0);CHKERRQ(ierr);
108   }
109   ierr = MatMult(A,u,b);CHKERRQ(ierr);
110 
111   /* View the exact solution vector if desired */
112   flg  = PETSC_FALSE;
113   ierr = PetscOptionsGetBool(NULL,NULL,"-view_exact_sol",&flg,NULL);CHKERRQ(ierr);
114   if (flg) {ierr = VecView(u,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
115 
116   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
117                 Create the linear solver and set various options
118      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
119   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
120   ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
121   ierr = KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),PETSC_DEFAULT,PETSC_DEFAULT,
122                           PETSC_DEFAULT);CHKERRQ(ierr);
123   ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126                       Solve the linear system
127      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
129 
130   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
131                       Check solution and clean up
132      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
133   ierr = VecAXPY(x,-1.0,u);CHKERRQ(ierr);
134   ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
135   ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
136   ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);CHKERRQ(ierr);
137 
138   /* Free work space. */
139   ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
140   ierr = VecDestroy(&u);CHKERRQ(ierr);  ierr = VecDestroy(&x);CHKERRQ(ierr);
141   ierr = VecDestroy(&b);CHKERRQ(ierr);  ierr = MatDestroy(&A);CHKERRQ(ierr);
142   ierr = PetscFinalize();
143   return ierr;
144 }
145 
146 
147 /*TEST
148 
149    test:
150       nsize: 8
151       args: -n 100 -ksp_type cg -pc_type bjacobi -sub_pc_type icc -ksp_rtol 1.e-8
152 
153    test:
154       suffix: 2
155       nsize: 8
156       args: -n 100 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc -redistribute_ksp_rtol 1.e-8
157 
158 TEST*/
159