1 static const char help[] = "Call PetscInitialize multiple times.\n"; 2 /* 3 This example is based on the Brusselator tutorial of the same name, but tests multiple calls to PetscInitialize(). 4 This is a bad "convergence study" because it only compares min and max values of the solution rather than comparing 5 norms of the errors. For convergence studies, we recommend invoking PetscInitialize() only once and comparing norms 6 of errors (perhaps estimated using an accurate reference solution). 7 8 Time-dependent Brusselator reaction-diffusion PDE in 1d. Demonstrates IMEX methods and multiple solves. 9 10 u_t - alpha u_xx = A + u^2 v - (B+1) u 11 v_t - alpha v_xx = B u - u^2 v 12 0 < x < 1; 13 A = 1, B = 3, alpha = 1/10 14 15 Initial conditions: 16 u(x,0) = 1 + sin(2 pi x) 17 v(x,0) = 3 18 19 Boundary conditions: 20 u(0,t) = u(1,t) = 1 21 v(0,t) = v(1,t) = 3 22 */ 23 24 #include <petscdm.h> 25 #include <petscdmda.h> 26 #include <petscts.h> 27 28 typedef struct { 29 PetscScalar u,v; 30 } Field; 31 32 typedef struct _User *User; 33 struct _User { 34 PetscReal A,B; /* Reaction coefficients */ 35 PetscReal alpha; /* Diffusion coefficient */ 36 PetscReal uleft,uright; /* Dirichlet boundary conditions */ 37 PetscReal vleft,vright; /* Dirichlet boundary conditions */ 38 }; 39 40 static PetscErrorCode FormRHSFunction(TS,PetscReal,Vec,Vec,void*); 41 static PetscErrorCode FormIFunction(TS,PetscReal,Vec,Vec,Vec,void*); 42 static PetscErrorCode FormIJacobian(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*); 43 static PetscErrorCode FormInitialSolution(TS,Vec,void*); 44 static int Brusselator(int,char**,PetscInt); 45 46 int main(int argc,char **argv) 47 { 48 PetscInt cycle; 49 PetscErrorCode ierr; 50 51 ierr = MPI_Init(&argc,&argv);if (ierr) return ierr; 52 for (cycle=0; cycle<4; cycle++) { 53 ierr = Brusselator(argc,argv,cycle); 54 if (ierr) return 1; 55 } 56 ierr = MPI_Finalize(); 57 return ierr; 58 } 59 60 PetscErrorCode Brusselator(int argc,char **argv,PetscInt cycle) 61 { 62 TS ts; /* nonlinear solver */ 63 Vec X; /* solution, residual vectors */ 64 Mat J; /* Jacobian matrix */ 65 PetscInt steps,mx; 66 DM da; 67 PetscReal ftime,hx,dt,xmax,xmin; 68 struct _User user; /* user-defined work context */ 69 TSConvergedReason reason; 70 71 PetscCall(PetscInitialize(&argc,&argv,(char*)0,help)); 72 73 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 74 Create distributed array (DMDA) to manage parallel grid and vectors 75 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 76 PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,11,2,2,NULL,&da)); 77 PetscCall(DMSetFromOptions(da)); 78 PetscCall(DMSetUp(da)); 79 80 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 81 Extract global vectors from DMDA; 82 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 83 PetscCall(DMCreateGlobalVector(da,&X)); 84 85 /* Initialize user application context */ 86 PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"Advection-reaction options",""); 87 { 88 user.A = 1; 89 user.B = 3; 90 user.alpha = 0.1; 91 user.uleft = 1; 92 user.uright = 1; 93 user.vleft = 3; 94 user.vright = 3; 95 PetscCall(PetscOptionsReal("-A","Reaction rate","",user.A,&user.A,NULL)); 96 PetscCall(PetscOptionsReal("-B","Reaction rate","",user.B,&user.B,NULL)); 97 PetscCall(PetscOptionsReal("-alpha","Diffusion coefficient","",user.alpha,&user.alpha,NULL)); 98 PetscCall(PetscOptionsReal("-uleft","Dirichlet boundary condition","",user.uleft,&user.uleft,NULL)); 99 PetscCall(PetscOptionsReal("-uright","Dirichlet boundary condition","",user.uright,&user.uright,NULL)); 100 PetscCall(PetscOptionsReal("-vleft","Dirichlet boundary condition","",user.vleft,&user.vleft,NULL)); 101 PetscCall(PetscOptionsReal("-vright","Dirichlet boundary condition","",user.vright,&user.vright,NULL)); 102 } 103 PetscOptionsEnd(); 104 105 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 106 Create timestepping solver context 107 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 108 PetscCall(TSCreate(PETSC_COMM_WORLD,&ts)); 109 PetscCall(TSSetDM(ts,da)); 110 PetscCall(TSSetType(ts,TSARKIMEX)); 111 PetscCall(TSSetRHSFunction(ts,NULL,FormRHSFunction,&user)); 112 PetscCall(TSSetIFunction(ts,NULL,FormIFunction,&user)); 113 PetscCall(DMSetMatType(da,MATAIJ)); 114 PetscCall(DMCreateMatrix(da,&J)); 115 PetscCall(TSSetIJacobian(ts,J,J,FormIJacobian,&user)); 116 117 ftime = 1.0; 118 PetscCall(TSSetMaxTime(ts,ftime)); 119 PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER)); 120 121 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 122 Set initial conditions 123 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 124 PetscCall(FormInitialSolution(ts,X,&user)); 125 PetscCall(TSSetSolution(ts,X)); 126 PetscCall(VecGetSize(X,&mx)); 127 hx = 1.0/(PetscReal)(mx/2-1); 128 dt = 0.4 * PetscSqr(hx) / user.alpha; /* Diffusive stability limit */ 129 dt *= PetscPowRealInt(0.2,cycle); /* Shrink the time step in convergence study. */ 130 PetscCall(TSSetTimeStep(ts,dt)); 131 PetscCall(TSSetTolerances(ts,1e-3*PetscPowRealInt(0.5,cycle),NULL,1e-3*PetscPowRealInt(0.5,cycle),NULL)); 132 133 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 134 Set runtime options 135 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 136 PetscCall(TSSetFromOptions(ts)); 137 138 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 139 Solve nonlinear system 140 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 141 PetscCall(TSSolve(ts,X)); 142 PetscCall(TSGetSolveTime(ts,&ftime)); 143 PetscCall(TSGetStepNumber(ts,&steps)); 144 PetscCall(TSGetConvergedReason(ts,&reason)); 145 PetscCall(VecMin(X,NULL,&xmin)); 146 PetscCall(VecMax(X,NULL,&xmax)); 147 PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%s at time %g after % 3" PetscInt_FMT " steps. Range [%6.4f,%6.4f]\n",TSConvergedReasons[reason],(double)ftime,steps,(double)xmin,(double)xmax)); 148 149 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 150 Free work space. 151 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 152 PetscCall(MatDestroy(&J)); 153 PetscCall(VecDestroy(&X)); 154 PetscCall(TSDestroy(&ts)); 155 PetscCall(DMDestroy(&da)); 156 PetscCall(PetscFinalize()); 157 return 0; 158 } 159 160 static PetscErrorCode FormIFunction(TS ts,PetscReal t,Vec X,Vec Xdot,Vec F,void *ptr) 161 { 162 User user = (User)ptr; 163 DM da; 164 DMDALocalInfo info; 165 PetscInt i; 166 Field *x,*xdot,*f; 167 PetscReal hx; 168 Vec Xloc; 169 170 PetscFunctionBeginUser; 171 PetscCall(TSGetDM(ts,&da)); 172 PetscCall(DMDAGetLocalInfo(da,&info)); 173 hx = 1.0/(PetscReal)(info.mx-1); 174 175 /* 176 Scatter ghost points to local vector,using the 2-step process 177 DMGlobalToLocalBegin(),DMGlobalToLocalEnd(). 178 By placing code between these two statements, computations can be 179 done while messages are in transition. 180 */ 181 PetscCall(DMGetLocalVector(da,&Xloc)); 182 PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,Xloc)); 183 PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,Xloc)); 184 185 /* Get pointers to vector data */ 186 PetscCall(DMDAVecGetArrayRead(da,Xloc,&x)); 187 PetscCall(DMDAVecGetArrayRead(da,Xdot,&xdot)); 188 PetscCall(DMDAVecGetArray(da,F,&f)); 189 190 /* Compute function over the locally owned part of the grid */ 191 for (i=info.xs; i<info.xs+info.xm; i++) { 192 if (i == 0) { 193 f[i].u = hx * (x[i].u - user->uleft); 194 f[i].v = hx * (x[i].v - user->vleft); 195 } else if (i == info.mx-1) { 196 f[i].u = hx * (x[i].u - user->uright); 197 f[i].v = hx * (x[i].v - user->vright); 198 } else { 199 f[i].u = hx * xdot[i].u - user->alpha * (x[i-1].u - 2.*x[i].u + x[i+1].u) / hx; 200 f[i].v = hx * xdot[i].v - user->alpha * (x[i-1].v - 2.*x[i].v + x[i+1].v) / hx; 201 } 202 } 203 204 /* Restore vectors */ 205 PetscCall(DMDAVecRestoreArrayRead(da,Xloc,&x)); 206 PetscCall(DMDAVecRestoreArrayRead(da,Xdot,&xdot)); 207 PetscCall(DMDAVecRestoreArray(da,F,&f)); 208 PetscCall(DMRestoreLocalVector(da,&Xloc)); 209 PetscFunctionReturn(0); 210 } 211 212 static PetscErrorCode FormRHSFunction(TS ts,PetscReal t,Vec X,Vec F,void *ptr) 213 { 214 User user = (User)ptr; 215 DM da; 216 DMDALocalInfo info; 217 PetscInt i; 218 PetscReal hx; 219 Field *x,*f; 220 221 PetscFunctionBeginUser; 222 PetscCall(TSGetDM(ts,&da)); 223 PetscCall(DMDAGetLocalInfo(da,&info)); 224 hx = 1.0/(PetscReal)(info.mx-1); 225 226 /* Get pointers to vector data */ 227 PetscCall(DMDAVecGetArrayRead(da,X,&x)); 228 PetscCall(DMDAVecGetArray(da,F,&f)); 229 230 /* Compute function over the locally owned part of the grid */ 231 for (i=info.xs; i<info.xs+info.xm; i++) { 232 PetscScalar u = x[i].u,v = x[i].v; 233 f[i].u = hx*(user->A + u*u*v - (user->B+1)*u); 234 f[i].v = hx*(user->B*u - u*u*v); 235 } 236 237 /* Restore vectors */ 238 PetscCall(DMDAVecRestoreArrayRead(da,X,&x)); 239 PetscCall(DMDAVecRestoreArray(da,F,&f)); 240 PetscFunctionReturn(0); 241 } 242 243 /* --------------------------------------------------------------------- */ 244 /* 245 IJacobian - Compute IJacobian = dF/dU + a dF/dUdot 246 */ 247 PetscErrorCode FormIJacobian(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat J,Mat Jpre,void *ptr) 248 { 249 User user = (User)ptr; 250 DMDALocalInfo info; 251 PetscInt i; 252 PetscReal hx; 253 DM da; 254 Field *x,*xdot; 255 256 PetscFunctionBeginUser; 257 PetscCall(TSGetDM(ts,&da)); 258 PetscCall(DMDAGetLocalInfo(da,&info)); 259 hx = 1.0/(PetscReal)(info.mx-1); 260 261 /* Get pointers to vector data */ 262 PetscCall(DMDAVecGetArrayRead(da,X,&x)); 263 PetscCall(DMDAVecGetArrayRead(da,Xdot,&xdot)); 264 265 /* Compute function over the locally owned part of the grid */ 266 for (i=info.xs; i<info.xs+info.xm; i++) { 267 if (i == 0 || i == info.mx-1) { 268 const PetscInt row = i,col = i; 269 const PetscScalar vals[2][2] = {{hx,0},{0,hx}}; 270 PetscCall(MatSetValuesBlocked(Jpre,1,&row,1,&col,&vals[0][0],INSERT_VALUES)); 271 } else { 272 const PetscInt row = i,col[] = {i-1,i,i+1}; 273 const PetscScalar dxxL = -user->alpha/hx,dxx0 = 2.*user->alpha/hx,dxxR = -user->alpha/hx; 274 const PetscScalar vals[2][3][2] = {{{dxxL,0},{a *hx+dxx0,0},{dxxR,0}}, 275 {{0,dxxL},{0,a*hx+dxx0},{0,dxxR}}}; 276 PetscCall(MatSetValuesBlocked(Jpre,1,&row,3,col,&vals[0][0][0],INSERT_VALUES)); 277 } 278 } 279 280 /* Restore vectors */ 281 PetscCall(DMDAVecRestoreArrayRead(da,X,&x)); 282 PetscCall(DMDAVecRestoreArrayRead(da,Xdot,&xdot)); 283 284 PetscCall(MatAssemblyBegin(Jpre,MAT_FINAL_ASSEMBLY)); 285 PetscCall(MatAssemblyEnd(Jpre,MAT_FINAL_ASSEMBLY)); 286 if (J != Jpre) { 287 PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY)); 288 PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY)); 289 } 290 PetscFunctionReturn(0); 291 } 292 293 PetscErrorCode FormInitialSolution(TS ts,Vec X,void *ctx) 294 { 295 User user = (User)ctx; 296 DM da; 297 PetscInt i; 298 DMDALocalInfo info; 299 Field *x; 300 PetscReal hx; 301 302 PetscFunctionBeginUser; 303 PetscCall(TSGetDM(ts,&da)); 304 PetscCall(DMDAGetLocalInfo(da,&info)); 305 hx = 1.0/(PetscReal)(info.mx-1); 306 307 /* Get pointers to vector data */ 308 PetscCall(DMDAVecGetArray(da,X,&x)); 309 310 /* Compute function over the locally owned part of the grid */ 311 for (i=info.xs; i<info.xs+info.xm; i++) { 312 PetscReal xi = i*hx; 313 x[i].u = user->uleft*(1.-xi) + user->uright*xi + PetscSinReal(2.*PETSC_PI*xi); 314 x[i].v = user->vleft*(1.-xi) + user->vright*xi; 315 } 316 PetscCall(DMDAVecRestoreArray(da,X,&x)); 317 PetscFunctionReturn(0); 318 } 319 320 /*TEST 321 322 test: 323 args: -ts_exact_final_time INTERPOLATE -snes_rtol 1.e-3 324 325 test: 326 suffix: 2 327 args: -ts_exact_final_time INTERPOLATE -snes_rtol 1.e-3 328 329 TEST*/ 330