static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

        rho_t = epsilon rho_xx - kappa (rho c_x)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
        c_t   = delta c_xx - lambda c - alpha rho c/(gamma + c)

     (these right-hand sides are read off from the residuals assembled in IFunction() below)

     Further discussion on Page 134 and in 2d on Page 409
*/
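
/*
   Example usage (these options mirror the TEST block at the bottom of this file):

       ./ex4 -pc_type lu -da_refine 2 -ts_monitor -ts_view -ts_max_time 1

   Use -upwind 0 to discretize the chemotaxis term with centered differences
   instead of the default one-sided flux form.
*/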

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h  - vectors
     petscmat.h    - matrices                 petscis.h   - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h   - preconditioners
     petscviewer.h - viewers                  petscsnes.h - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar rho,c;
} Field;

typedef struct {
  PetscScalar epsilon,delta,alpha,beta,gamma,kappa,lambda,mu,cstar;
  PetscBool   upwind;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*),InitialConditions(DM,Vec);

int main(int argc,char **argv)
{
  TS             ts;                  /* timestepping solver context */
  Vec            U;                   /* solution vector */
  DM             da;
  AppCtx         appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,NULL,help));

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-delta",&appctx.delta,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-upwind",&appctx.upwind,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,8,2,1,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da,0,"rho"));
  PetscCall(DMDASetFieldName(da,1,"c"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract a global vector from the DMDA to hold the solution
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetType(ts,TSROSW));
  PetscCall(TSSetDM(ts,da));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts,NULL,IFunction,&appctx));
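  /*
     No IJacobian is provided, so by default PETSc approximates the Jacobian
     with finite differences, using a coloring derived from the DMDA's
     sparsity pattern to make that computation efficient
  */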

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da,U));
  PetscCall(TSSetSolution(ts,U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetTimeStep(ts,.0001));
  PetscCall(TSSetMaxTime(ts,1.0));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the time-dependent system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts,U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&U));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit function F(t,U,U_t).

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  PetscInt       i,Mx,xs,xm;
  PetscReal      hx,sx;
  PetscScalar    rho,c,rhoxx,cxx,cx,rhox,kcxrhox;
  Field          *u,*f,*udot;
  Vec            localU;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx);

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
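  /* (independent, purely local work that needs no ghost values could be placed here to overlap with the communication) */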
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));
  PetscCall(DMDAVecGetArrayWrite(da,F,&f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL));

  /*
     Boundary conditions: at each end of the domain set f = udot so the
     boundary values retain their initial values (the commented-out
     residuals are the equivalent algebraic Dirichlet conditions)
  */
  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c;   /* u[0].c   - 1.0; */
    xs++;
    xm--;
  }
  if (xs+xm == Mx) {
    f[Mx-1].rho = udot[Mx-1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx-1].c   = udot[Mx-1].c;   /* u[Mx-1].c   - 0.0; */
    xm--;
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (i=xs; i<xs+xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0*rho + u[i-1].rho + u[i+1].rho)*sx;
    c     = u[i].c;
    cxx   = (-2.0*c + u[i-1].c + u[i+1].c)*sx;

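    /*
       Chemotaxis term kappa*(rho*c_x)_x: either expanded with the product
       rule and centered differences, or (the default) a one-sided flux
       form, the usual upwind-style choice for advective terms
    */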
    if (!appctx->upwind) {
      rhox    = .5*(u[i+1].rho - u[i-1].rho)/hx;
      cx      = .5*(u[i+1].c - u[i-1].c)/hx;
      kcxrhox = appctx->kappa*(cxx*rho + cx*rhox);
    } else {
      kcxrhox = appctx->kappa*((u[i+1].c - u[i].c)*u[i+1].rho - (u[i].c - u[i-1].c)*u[i].rho)*sx;
    }

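    /* residuals in implicit form F(t,u,u_t) = u_t - RHS(u), matching the equations at the top of the file */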
    f[i].rho = udot[i].rho - appctx->epsilon*rhoxx + kcxrhox - appctx->mu*PetscAbsScalar(rho)*(1.0 - rho)*PetscMax(0,PetscRealPart(c - appctx->cstar)) + appctx->beta*rho;
    f[i].c   = udot[i].c - appctx->delta*cxx + appctx->lambda*c + appctx->alpha*rho*c/(appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));
  PetscCall(DMDAVecRestoreArrayWrite(da,F,&f));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt       i,xs,xm,Mx;
  Field          *u;
  PetscReal      hx,x;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 1.0/(PetscReal)(Mx-1);

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayWrite(da,U,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL));

  /*
     Compute function over the locally owned part of the grid
  */
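  /*
     rho is zero except at the right end of the domain where rho = 1;
     c decays from 1 at the left end to 0 at the right end
  */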
  for (i=xs; i<xs+xm; i++) {
    x = i*hx;
    if (i < Mx-1) u[i].rho = 0.0;
    else          u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5*PETSC_PI*x);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayWrite(da,U,&u));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
      requires: double

   test:
      suffix: 2
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time 1
      requires: x double
      output_file: output/ex4_1.out

TEST*/