static const char help[] = "Call PetscInitialize multiple times.\n";
/*
   This example is based on the Brusselator tutorial of the same name, but tests multiple calls to PetscInitialize().
   This is a bad "convergence study" because it only compares min and max values of the solution rather than comparing
   norms of the errors. For convergence studies, we recommend invoking PetscInitialize() only once and comparing norms
   of errors (perhaps estimated using an accurate reference solution).

   Time-dependent Brusselator reaction-diffusion PDE in 1d. Demonstrates IMEX methods and multiple solves.

   u_t - alpha u_xx = A + u^2 v - (B+1) u
   v_t - alpha v_xx = B u - u^2 v
   0 < x < 1;
   A = 1, B = 3, alpha = 1/10

   Initial conditions:
   u(x,0) = 1 + sin(2 pi x)
   v(x,0) = 3

   Boundary conditions:
   u(0,t) = u(1,t) = 1
   v(0,t) = v(1,t) = 3
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar u, v;
} Field;

typedef struct _User *User;
struct _User {
  PetscReal A, B;          /* Reaction coefficients */
  PetscReal alpha;         /* Diffusion coefficient */
  PetscReal uleft, uright; /* Dirichlet boundary conditions */
  PetscReal vleft, vright; /* Dirichlet boundary conditions */
};

static PetscErrorCode FormRHSFunction(TS, PetscReal, Vec, Vec, void *);
static PetscErrorCode FormIFunction(TS, PetscReal, Vec, Vec, Vec, void *);
static PetscErrorCode FormIJacobian(TS, PetscReal, Vec, Vec, PetscReal, Mat, Mat, void *);
static PetscErrorCode FormInitialSolution(TS, Vec, void *);
static PetscErrorCode Brusselator(int, char **, PetscInt);

int main(int argc, char **argv) {
  PetscInt       cycle;
  PetscErrorCode ierr;

  ierr = MPI_Init(&argc, &argv);
  if (ierr) return ierr;
  for (cycle = 0; cycle < 4; cycle++) {
    ierr = Brusselator(argc, argv, cycle);
    if (ierr) return 1;
  }
  ierr = MPI_Finalize();
  return ierr;
}
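/*
   MPI is initialized above, before the first PetscInitialize(). When MPI is
   already running, PetscInitialize() does not re-initialize it and
   PetscFinalize() does not shut it down, which is what makes the repeated
   initialize/finalize cycles in main() legal.
*/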
static PetscErrorCode Brusselator(int argc, char **argv, PetscInt cycle) {
  TS                ts;   /* timestepping solver */
  Vec               X;    /* solution vector */
  Mat               J;    /* Jacobian matrix */
  PetscInt          steps, mx;
  DM                da;
  PetscReal         ftime, hx, dt, xmax, xmin;
  struct _User      user; /* user-defined work context */
  TSConvergedReason reason;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 11, 2, 2, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &X));

  /* Initialize user application context */
  PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "Reaction-diffusion options", "");
  {
    user.A      = 1;
    user.B      = 3;
    user.alpha  = 0.1;
    user.uleft  = 1;
    user.uright = 1;
    user.vleft  = 3;
    user.vright = 3;
    PetscCall(PetscOptionsReal("-A", "Reaction rate", "", user.A, &user.A, NULL));
    PetscCall(PetscOptionsReal("-B", "Reaction rate", "", user.B, &user.B, NULL));
    PetscCall(PetscOptionsReal("-alpha", "Diffusion coefficient", "", user.alpha, &user.alpha, NULL));
    PetscCall(PetscOptionsReal("-uleft", "Dirichlet boundary condition", "", user.uleft, &user.uleft, NULL));
    PetscCall(PetscOptionsReal("-uright", "Dirichlet boundary condition", "", user.uright, &user.uright, NULL));
    PetscCall(PetscOptionsReal("-vleft", "Dirichlet boundary condition", "", user.vleft, &user.vleft, NULL));
    PetscCall(PetscOptionsReal("-vright", "Dirichlet boundary condition", "", user.vright, &user.vright, NULL));
  }
  PetscOptionsEnd();

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetType(ts, TSARKIMEX));
  PetscCall(TSSetRHSFunction(ts, NULL, FormRHSFunction, &user));
  PetscCall(TSSetIFunction(ts, NULL, FormIFunction, &user));
  PetscCall(DMSetMatType(da, MATAIJ));
  PetscCall(DMCreateMatrix(da, &J));
  PetscCall(TSSetIJacobian(ts, J, J, FormIJacobian, &user));

  ftime = 1.0;
  PetscCall(TSSetMaxTime(ts, ftime));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(FormInitialSolution(ts, X, &user));
  PetscCall(TSSetSolution(ts, X));
  PetscCall(VecGetSize(X, &mx));
  hx = 1.0 / (PetscReal)(mx / 2 - 1);
  dt = 0.4 * PetscSqr(hx) / user.alpha; /* Diffusive stability limit */
  dt *= PetscPowRealInt(0.2, cycle);    /* Shrink the time step in convergence study. */
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetTolerances(ts, 1e-3 * PetscPowRealInt(0.5, cycle), NULL, 1e-3 * PetscPowRealInt(0.5, cycle), NULL));
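  /*
     Note on the step size above: X stores two components (u,v) per grid
     node, so VecGetSize() returns twice the node count and hx is computed
     from mx/2. The base dt is 0.4*hx^2/alpha, i.e. 80% of the explicit
     diffusive stability limit hx^2/(2*alpha); each cycle then shrinks dt
     by a factor of 5 and halves both tolerances, so later cycles should
     be increasingly accurate.
  */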
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, X));
  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));
  PetscCall(TSGetConvergedReason(ts, &reason));
  PetscCall(VecMin(X, NULL, &xmin));
  PetscCall(VecMax(X, NULL, &xmax));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s at time %g after % 3" PetscInt_FMT " steps. Range [%6.4f,%6.4f]\n", TSConvergedReasons[reason], (double)ftime, steps, (double)xmin, (double)xmax));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(MatDestroy(&J));
  PetscCall(VecDestroy(&X));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

static PetscErrorCode FormIFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ptr) {
  User          user = (User)ptr;
  DM            da;
  DMDALocalInfo info;
  PetscInt      i;
  Field        *x, *xdot, *f;
  PetscReal     hx;
  Vec           Xloc;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  hx = 1.0 / (PetscReal)(info.mx - 1);

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  PetscCall(DMGetLocalVector(da, &Xloc));
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, Xloc));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, Xloc));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, Xloc, &x));
  PetscCall(DMDAVecGetArrayRead(da, Xdot, &xdot));
  PetscCall(DMDAVecGetArray(da, F, &f));

  /* Compute function over the locally owned part of the grid */
  for (i = info.xs; i < info.xs + info.xm; i++) {
    if (i == 0) {
      f[i].u = hx * (x[i].u - user->uleft);
      f[i].v = hx * (x[i].v - user->vleft);
    } else if (i == info.mx - 1) {
      f[i].u = hx * (x[i].u - user->uright);
      f[i].v = hx * (x[i].v - user->vright);
    } else {
      f[i].u = hx * xdot[i].u - user->alpha * (x[i - 1].u - 2. * x[i].u + x[i + 1].u) / hx;
      f[i].v = hx * xdot[i].v - user->alpha * (x[i - 1].v - 2. * x[i].v + x[i + 1].v) / hx;
    }
  }

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArrayRead(da, Xloc, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, Xdot, &xdot));
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &Xloc));
  PetscFunctionReturn(0);
}

static PetscErrorCode FormRHSFunction(TS ts, PetscReal t, Vec X, Vec F, void *ptr) {
  User          user = (User)ptr;
  DM            da;
  DMDALocalInfo info;
  PetscInt      i;
  PetscReal     hx;
  Field        *x, *f;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  hx = 1.0 / (PetscReal)(info.mx - 1);

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, X, &x));
  PetscCall(DMDAVecGetArray(da, F, &f));

  /* Compute function over the locally owned part of the grid */
  for (i = info.xs; i < info.xs + info.xm; i++) {
    PetscScalar u = x[i].u, v = x[i].v;
    f[i].u = hx * (user->A + u * u * v - (user->B + 1) * u);
    f[i].v = hx * (user->B * u - u * u * v);
  }

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArrayRead(da, X, &x));
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscFunctionReturn(0);
}
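/*
   IMEX splitting used above: FormIFunction() supplies the stiff part
   hx*u_t - alpha*u_xx (handled implicitly by TSARKIMEX) together with the
   algebraic Dirichlet rows hx*(u - u_bc), while FormRHSFunction() supplies
   the nonstiff reaction terms (handled explicitly). Both residuals carry a
   factor of hx so boundary and interior rows stay comparably scaled as the
   grid is refined.
*/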
/* --------------------------------------------------------------------- */
/*
   FormIJacobian - Compute the Jacobian of the IFunction, dF/dU + a dF/dUdot
*/
static PetscErrorCode FormIJacobian(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat J, Mat Jpre, void *ptr) {
  User          user = (User)ptr;
  DMDALocalInfo info;
  PetscInt      i;
  PetscReal     hx;
  DM            da;
  Field        *x, *xdot;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  hx = 1.0 / (PetscReal)(info.mx - 1);

  /* Get pointers to vector data; x and xdot are not referenced below
     because the IFunction is linear in both */
  PetscCall(DMDAVecGetArrayRead(da, X, &x));
  PetscCall(DMDAVecGetArrayRead(da, Xdot, &xdot));

  /* Compute function over the locally owned part of the grid */
  for (i = info.xs; i < info.xs + info.xm; i++) {
    if (i == 0 || i == info.mx - 1) {
      /* Dirichlet rows: a 2x2 identity block scaled by hx */
      const PetscInt    row = i, col = i;
      const PetscScalar vals[2][2] = {
        {hx, 0 },
        {0,  hx}
      };
      PetscCall(MatSetValuesBlocked(Jpre, 1, &row, 1, &col, &vals[0][0], INSERT_VALUES));
    } else {
      /* Interior rows: shift a*hx plus the three-point diffusion stencil;
         the explicitly treated reaction terms do not appear here */
      const PetscInt    row = i, col[] = {i - 1, i, i + 1};
      const PetscScalar dxxL = -user->alpha / hx, dxx0 = 2. * user->alpha / hx, dxxR = -user->alpha / hx;
      const PetscScalar vals[2][3][2] = {
        {{dxxL, 0}, {a * hx + dxx0, 0}, {dxxR, 0}},
        {{0, dxxL}, {0, a * hx + dxx0}, {0, dxxR}}
      };
      PetscCall(MatSetValuesBlocked(Jpre, 1, &row, 3, col, &vals[0][0][0], INSERT_VALUES));
    }
  }

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArrayRead(da, X, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, Xdot, &xdot));

  PetscCall(MatAssemblyBegin(Jpre, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Jpre, MAT_FINAL_ASSEMBLY));
  if (J != Jpre) {
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode FormInitialSolution(TS ts, Vec X, void *ctx) {
  User          user = (User)ctx;
  DM            da;
  PetscInt      i;
  DMDALocalInfo info;
  Field        *x;
  PetscReal     hx;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  hx = 1.0 / (PetscReal)(info.mx - 1);

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArray(da, X, &x));

  /* Compute initial solution over the locally owned part of the grid */
  for (i = info.xs; i < info.xs + info.xm; i++) {
    PetscReal xi = i * hx;
    x[i].u = user->uleft * (1. - xi) + user->uright * xi + PetscSinReal(2. * PETSC_PI * xi);
    x[i].v = user->vleft * (1. - xi) + user->vright * xi;
  }
  PetscCall(DMDAVecRestoreArray(da, X, &x));
  PetscFunctionReturn(0);
}

/*TEST

   test:
     args: -ts_exact_final_time INTERPOLATE -snes_rtol 1.e-3

   test:
     suffix: 2
     args: -ts_exact_final_time INTERPOLATE -snes_rtol 1.e-3

TEST*/
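/*
   Sample invocation, using the arguments from the test blocks above (the
   binary name "./ex" is only a placeholder for whatever the build produces):

     mpiexec -n 2 ./ex -ts_exact_final_time INTERPOLATE -snes_rtol 1.e-3

   Each of the four cycles prints one line with the converged reason, final
   time, step count, and the [min,max] range of the solution, which is what
   this crude "convergence study" compares across cycles.
*/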