static char help[] = "Test PC redistribute on matrix with load imbalance.\n\
Modified from src/ksp/ksp/tutorials/ex2.c.\n\
Input parameters include:\n\
  -random_exact_sol    : use a random exact solution vector\n\
  -view_exact_sol      : write exact solution vector to stdout\n\
  -n <mesh_y>          : number of mesh points in y-direction\n\
  -matdistribute <0,1> : select which imbalanced row distribution to use\n\n";
/*
  Example:
  mpiexec -n 8 ./ex3 -n 10000 -ksp_type cg -pc_type bjacobi -sub_pc_type icc -ksp_rtol 1.e-8 -log_view
  mpiexec -n 8 ./ex3 -n 10000 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc -redistribute_ksp_rtol 1.e-8 -log_view
*/

#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscRandom rctx;    /* random number generator context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, j, Ii, J, Istart, Iend, m, n = 7, its, nloc, matdistribute = 0;
  PetscBool   flg = PETSC_FALSE;
  PetscScalar v;
  PetscMPIInt rank, size;
#if defined(PETSC_USE_LOG)
  PetscLogStage stage;
#endif

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCheck(size > 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example requires at least 2 MPI processes!");

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-matdistribute", &matdistribute, NULL));
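  /*
     Both -matdistribute modes below choose a deliberately imbalanced row
     distribution so that PCREDISTRIBUTE has an imbalance to repair. The
     matrix itself is the standard 5-point stencil on an m x n grid, as in
     ex2.c; only the ownership of its rows differs between the two modes.
  */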
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute the matrix and right-hand-side vector that define
     the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  switch (matdistribute) {
  case 1: /* very imbalanced process load for matrix A */
    m    = (1 + size) * size;
    nloc = (rank + 1) * n;
    if (rank == size - 1) { /* proc[size-1] stores all remaining rows */
      nloc = m * n;
      for (i = 0; i < size - 1; i++) { nloc -= (i + 1) * n; }
    }
    break;
  default: /* proc[0] and proc[1] get much smaller row blocks; the remaining processes carry equal, larger loads */
    if (rank == 0 || rank == 1) {
      nloc = n;
    } else {
      nloc = 10 * n; /* 10x larger load */
    }
    m = 2 + (size - 2) * 10;
    break;
  }
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, nloc, nloc, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 5, NULL));
  PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
  PetscCall(MatSetUp(A));

  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
  nloc = Iend - Istart;
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] A Istart,Iend: %" PetscInt_FMT " %" PetscInt_FMT "; nloc %" PetscInt_FMT "\n", rank, Istart, Iend, nloc));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

  PetscCall(PetscLogStageRegister("Assembly", &stage));
  PetscCall(PetscLogStagePush(stage));
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (i < m - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogStagePop());

  /* A is symmetric. Set the symmetric flag to enable ICC/Cholesky preconditioners */
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));

  /* Create parallel vectors with the same row distribution as A. */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, nloc, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /* Set the exact solution, then compute the right-hand-side vector. */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-random_exact_sol", &flg, NULL));
  if (flg) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
    PetscCall(PetscRandomDestroy(&rctx));
  } else {
    PetscCall(VecSet(u, 1.0));
  }
  PetscCall(MatMult(A, u, b));

  /* View the exact solution vector if desired */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL));
  if (flg) PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSetTolerances(ksp, 1.e-2 / ((m + 1) * (n + 1)), PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  PetscCall(KSPSetFromOptions(ksp));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPSolve(ksp, b, x));
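  /*
     With -pc_type redistribute, the Krylov iterations actually run inside an
     inner KSP that operates on the load-balanced subsystem. A minimal sketch
     (left disabled here so the test output stays unchanged) of querying that
     inner solver after the solve, using PCRedistributeGetKSP():

       PC        pc;
       PetscBool isred;

       PetscCall(KSPGetPC(ksp, &pc));
       PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCREDISTRIBUTE, &isred));
       if (isred) {
         KSP innerksp;
         PetscCall(PCRedistributeGetKSP(pc, &innerksp));
         PetscCall(KSPView(innerksp, PETSC_VIEWER_STDOUT_WORLD));
       }
  */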
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));

  /* Free work space. */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 8
      args: -n 100 -ksp_type cg -pc_type bjacobi -sub_pc_type icc -ksp_rtol 1.e-8

   test:
      suffix: 2
      nsize: 8
      args: -n 100 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc -redistribute_ksp_rtol 1.e-8

TEST*/