static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*F
   This example is taken from the book Numerical Solution of Time-Dependent Advection-Diffusion-Reaction Equations by
   W. Hundsdorfer and J.G. Verwer, page 21, Pattern Formation with Reaction-Diffusion Equations
\begin{eqnarray*}
   u_t = D_1 (u_{xx} + u_{yy}) - u v^2 + \gamma(1 - u) \\
   v_t = D_2 (v_{xx} + v_{yy}) + u v^2 - (\gamma + \kappa) v
\end{eqnarray*}
   Unlike in the book, this example uses periodic boundary conditions instead of Neumann
   (since they are easier for finite differences).
F*/

/*
   Helpful runtime monitor options:
      -ts_monitor_draw_solution
      -draw_save -draw_save_movie

   Helpful runtime linear solver options:
      -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view
      (note that these Jacobians are so well-conditioned that multigrid may not be the best solver)

   Point your browser to localhost:8080 to monitor the simulation:
      ./ex5 -ts_view_pre saws -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS (ODE) integrators. Note that this
   file automatically includes:
      petscsys.h    - base PETSc routines       petscvec.h  - vectors
      petscmat.h    - matrices                  petscis.h   - index sets
      petscksp.h    - Krylov subspace methods   petscpc.h   - preconditioners
      petscviewer.h - viewers                   petscsnes.h - nonlinear solvers
*/
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  /*
     Get a pointer to the actual vector data
  */
  PetscCall(DMDAVecGetArray(da,U,&u));

  /*
     Get the local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /*
     Compute the initial conditions over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) {
        u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
      } else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  /*
     Restore access to the vector
  */
  PetscCall(DMDAVecRestoreArray(da,U,&u));
  PetscFunctionReturn(0);
}
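/*
   For reference, a minimal sketch of the pointwise reaction (non-diffusive)
   terms that the right-hand-side evaluation produces. RHSFunction itself is
   defined in reaction_diffusion.c; the hypothetical helper below is
   illustrative only and not part of the build:

      static void ReactionTerms(const AppCtx *appctx,PetscScalar u,PetscScalar v,PetscScalar *fu,PetscScalar *fv)
      {
        *fu = -u*v*v + appctx->gamma*(1.0 - u);           // -u v^2 + gamma (1 - u)
        *fv =  u*v*v - (appctx->gamma + appctx->kappa)*v; //  u v^2 - (gamma + kappa) v
      }

   The diffusion terms D1 (u_xx + u_yy) and D2 (v_xx + v_yy) are discretized
   with standard second-order centered differences, consistent with the DMDA
   created below using DMDA_STENCIL_STAR and stencil width 1.
*/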
int main(int argc,char **argv)
{
  TS     ts; /* ODE integrator */
  Vec    x;  /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da,0,"u"));
  PetscCall(DMDASetFieldName(da,1,"v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vector from DMDA; this will be used to store the solution
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetType(ts,TSARKIMEX));
  PetscCall(TSARKIMEXSetFullyImplicit(ts,PETSC_TRUE));
  PetscCall(TSSetDM(ts,da));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
  PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da,x));
  PetscCall(TSSetSolution(ts,x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts,2000.0));
  PetscCall(TSSetTimeStep(ts,.0001));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts,x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     depends: reaction_diffusion.c

   test:
     args: -ts_view -ts_monitor -ts_max_time 500
     requires: double
     timeoutfactor: 3

   test:
     suffix: 2
     args: -ts_view -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
     requires: x double
     output_file: output/ex5_1.out
     timeoutfactor: 3

TEST*/