static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

        rho_t = epsilon rho_xx - kappa (rho c_x)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
        c_t   = delta c_xx - lambda c - alpha rho c/(gamma + c)

     (as discretized in IFunction() below)

     Further discussion on Page 134 and in 2d on Page 409
*/
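
/*
   At t = 0, rho = 0 everywhere except rho = 1 at x = 1, and c = cos(pi x/2)
   (see InitialConditions()).  The boundary residuals in IFunction() impose
   rho_t = c_t = 0 at both ends, so the endpoint values stay at their
   initial data.
*/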

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS (timestepping) solvers.  Note that
   this file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h  - preconditioners
     petscviewer.h - viewers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar rho,c;
} Field;

typedef struct {
  PetscScalar epsilon,delta,alpha,beta,gamma,kappa,lambda,mu,cstar;
  PetscBool   upwind;
} AppCtx;
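
/*
   Roles of the AppCtx parameters, as inferred from their use in IFunction()
   (the file itself does not document them): epsilon and delta are the rho and
   c diffusion coefficients, kappa scales the chemotactic flux kappa*(rho c_x)_x,
   mu/cstar/beta control growth and decay of rho, and lambda/alpha/gamma control
   decay and consumption of c.
*/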

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*),InitialConditions(DM,Vec);

int main(int argc,char **argv)
{
  TS             ts;                  /* timestepping solver context */
  Vec            U;                   /* solution vector */
  DM             da;
  AppCtx         appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-delta",&appctx.delta,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-upwind",&appctx.upwind,NULL));
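
  /*
     A minimal sketch exposing the remaining model parameters the same way;
     the option names below are illustrative (they are not part of the TEST
     block at the end of this file):
  */
  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-epsilon",&appctx.epsilon,NULL));
  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-kappa",&appctx.kappa,NULL));
  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-mu",&appctx.mu,NULL));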

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,8,2,1,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da,0,"rho"));
  PetscCall(DMDASetFieldName(da,1,"c"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract a global vector from the DMDA to hold the solution
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetType(ts,TSROSW));
  PetscCall(TSSetDM(ts,da));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts,NULL,IFunction,&appctx));
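  /* No IJacobian is supplied, so PETSc approximates the Jacobian by finite
     differencing IFunction(), using a matrix coloring provided by the DMDA. */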

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da,U));
  PetscCall(TSSetSolution(ts,U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetTimeStep(ts,.0001));
  PetscCall(TSSetMaxTime(ts,1.0));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts,U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&U));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit function F(t,U,U_t) whose root defines the ODE system.

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  PetscInt       i,Mx,xs,xm;
  PetscReal      hx,sx;
  PetscScalar    rho,c,rhoxx,cxx,cx,rhox,kcxrhox;
  Field          *u,*f,*udot;
  Vec            localU;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx); /* mesh spacing on [0,1] and 1/hx^2 for second differences */

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));
  PetscCall(DMDAVecGetArrayWrite(da,F,&f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL));

  /*
     Boundary residuals: impose rho_t = c_t = 0 at both ends, so the endpoint
     values remain at their initial data; then shrink [xs,xs+xm) so the interior
     loop below does not touch the boundary points.  (The commented-out
     alternatives would impose the Dirichlet values directly.)
  */
  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c; /* u[0].c   - 1.0; */
    xs++;
    xm--;
  }
  if (xs+xm == Mx) {
    f[Mx-1].rho = udot[Mx-1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx-1].c   = udot[Mx-1].c;  /* u[Mx-1].c   - 0.0;  */
    xm--;
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (i=xs; i<xs+xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0*rho + u[i-1].rho + u[i+1].rho)*sx;
    c     = u[i].c;
    cxx   = (-2.0*c + u[i-1].c + u[i+1].c)*sx;

    if (!appctx->upwind) {
      /* centered differences: kappa*(rho c_x)_x expanded as kappa*(c_xx*rho + c_x*rho_x) */
      rhox    = .5*(u[i+1].rho - u[i-1].rho)/hx;
      cx      = .5*(u[i+1].c - u[i-1].c)/hx;
      kcxrhox = appctx->kappa*(cxx*rho + cx*rhox);
    } else {
      /* one-sided (upwind-type) differencing of the flux kappa*rho*c_x across cell faces */
      kcxrhox = appctx->kappa*((u[i+1].c - u[i].c)*u[i+1].rho - (u[i].c - u[i-1].c)*u[i].rho)*sx;
    }

    f[i].rho = udot[i].rho - appctx->epsilon*rhoxx + kcxrhox  - appctx->mu*PetscAbsScalar(rho)*(1.0 - rho)*PetscMax(0,PetscRealPart(c - appctx->cstar)) + appctx->beta*rho;
    f[i].c   = udot[i].c - appctx->delta*cxx + appctx->lambda*c + appctx->alpha*rho*c/(appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));
  PetscCall(DMDAVecRestoreArrayWrite(da,F,&f));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt       i,xs,xm,Mx;
  Field          *u;
  PetscReal      hx,x;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 1.0/(PetscReal)(Mx-1);

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayWrite(da,U,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL));

  /*
     Compute initial data over the locally owned part of the grid:
     rho = 0 except at the right boundary, c = cos(pi x/2)
  */
  for (i=xs; i<xs+xm; i++) {
    x = i*hx;
    if (i < Mx-1) u[i].rho = 0.0;
    else          u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5*PETSC_PI*x);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayWrite(da,U,&u));
  PetscFunctionReturn(0);
}

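/*
   Illustrative command lines (the first mirrors the TEST block below; the
   second exercises the -delta and -upwind options registered in main()):

       ./ex4 -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
       ./ex4 -delta 10 -upwind 0 -ts_monitor -ts_max_time 1
*/
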
/*TEST

   test:
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
      requires: double

   test:
      suffix: 2
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time 1
      requires: x double
      output_file: output/ex4_1.out

TEST*/