xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5.c (revision e600fa544e2bb197ca2af9b6e65ea465976dec56)
1 
2 static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";
3 
4 /*F
5      This example is taken from the book, Numerical Solution of Time-Dependent Advection-Diffusion-Reaction Equations by
6       W. Hundsdorfer and J.G. Verwer,  Page 21, Pattern Formation with Reaction-Diffusion Equations
7 \begin{eqnarray*}
8         u_t = D_1 (u_{xx} + u_{yy})  - u*v^2 + \gamma(1 -u)           \\
9         v_t = D_2 (v_{xx} + v_{yy})  + u*v^2 - (\gamma + \kappa)v
10 \end{eqnarray*}
11     Unlike in the book this uses periodic boundary conditions instead of Neumann
12     (since they are easier for finite differences).
13 F*/
14 
15 /*
16       Helpful runtime monitor options:
17            -ts_monitor_draw_solution
18            -draw_save -draw_save_movie
19 
20       Helpful runtime linear solver options:
21            -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view  (note that these Jacobians are so well-conditioned multigrid may not be the best solver)
22 
23       Point your browser to localhost:8080 to monitor the simulation
24            ./ex5  -ts_view_pre saws  -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
25 
26 */
27 
28 /*
29 
30    Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
31    Include "petscts.h" so that we can use TS numerical (ODE) integrators.  Note that this
32    file automatically includes:
33      petscsys.h       - base PETSc routines   petscvec.h  - vectors
34      petscmat.h - matrices                    petscis.h   - index sets
35      petscksp.h - Krylov subspace methods     petscpc.h   - preconditioners
36      petscviewer.h - viewers                  petscsnes.h - nonlinear solvers
37 */
38 #include "reaction_diffusion.h"
39 #include <petscdm.h>
40 #include <petscdmda.h>
41 
42 /* ------------------------------------------------------------------- */
43 PetscErrorCode InitialConditions(DM da,Vec U)
44 {
45   PetscErrorCode ierr;
46   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
47   Field          **u;
48   PetscReal      hx,hy,x,y;
49 
50   PetscFunctionBegin;
51   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
52 
53   hx = 2.5/(PetscReal)(Mx);
54   hy = 2.5/(PetscReal)(My);
55 
56   /*
57      Get pointers to actual vector data
58   */
59   ierr = DMDAVecGetArray(da,U,&u);CHKERRQ(ierr);
60 
61   /*
62      Get local grid boundaries
63   */
64   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
65 
66   /*
67      Compute function over the locally owned part of the grid
68   */
69   for (j=ys; j<ys+ym; j++) {
70     y = j*hy;
71     for (i=xs; i<xs+xm; i++) {
72       x = i*hx;
73       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
74       else u[j][i].v = 0.0;
75 
76       u[j][i].u = 1.0 - 2.0*u[j][i].v;
77     }
78   }
79 
80   /*
81      Restore access to vector
82   */
83   ierr = DMDAVecRestoreArray(da,U,&u);CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
87 int main(int argc,char **argv)
88 {
89   TS             ts;                  /* ODE integrator */
90   Vec            x;                   /* solution */
91   PetscErrorCode ierr;
92   DM             da;
93   AppCtx         appctx;
94 
95   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
96      Initialize program
97      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
98   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
99   PetscFunctionBeginUser;
100   appctx.D1    = 8.0e-5;
101   appctx.D2    = 4.0e-5;
102   appctx.gamma = .024;
103   appctx.kappa = .06;
104   appctx.aijpc = PETSC_FALSE;
105 
106   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
107      Create distributed array (DMDA) to manage parallel grid and vectors
108   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
109   ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);CHKERRQ(ierr);
110   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
111   ierr = DMSetUp(da);CHKERRQ(ierr);
112   ierr = DMDASetFieldName(da,0,"u");CHKERRQ(ierr);
113   ierr = DMDASetFieldName(da,1,"v");CHKERRQ(ierr);
114 
115   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
116      Create global vector from DMDA; this will be used to store the solution
117    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
118   ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
119 
120   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121      Create timestepping solver context
122      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
124   ierr = TSSetType(ts,TSARKIMEX);CHKERRQ(ierr);
125   ierr = TSARKIMEXSetFullyImplicit(ts,PETSC_TRUE);CHKERRQ(ierr);
126   ierr = TSSetDM(ts,da);CHKERRQ(ierr);
127   ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
128   ierr = TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);CHKERRQ(ierr);
129   ierr = TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);CHKERRQ(ierr);
130 
131   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
132      Set initial conditions
133    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
134   ierr = InitialConditions(da,x);CHKERRQ(ierr);
135   ierr = TSSetSolution(ts,x);CHKERRQ(ierr);
136 
137   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138      Set solver options
139    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
140   ierr = TSSetMaxTime(ts,2000.0);CHKERRQ(ierr);
141   ierr = TSSetTimeStep(ts,.0001);CHKERRQ(ierr);
142   ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);CHKERRQ(ierr);
143   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
144 
145   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146      Solve ODE system
147      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
148   ierr = TSSolve(ts,x);CHKERRQ(ierr);
149 
150   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
151      Free work space.
152    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
153   ierr = VecDestroy(&x);CHKERRQ(ierr);
154   ierr = TSDestroy(&ts);CHKERRQ(ierr);
155   ierr = DMDestroy(&da);CHKERRQ(ierr);
156 
157   ierr = PetscFinalize();
158   return ierr;
159 }
160 
161 /*TEST
162 
163    build:
164      depends: reaction_diffusion.c
165 
166    test:
167       args: -ts_view  -ts_monitor -ts_max_time 500
168       requires: double
169       timeoutfactor: 3
170 
171    test:
172       suffix: 2
173       args: -ts_view  -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
174       requires: x double
175       output_file: output/ex5_1.out
176       timeoutfactor: 3
177 
178 TEST*/
179