
static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

        rho_t = epsilon rho_xx - kappa (rho c_x)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
        c_t   = delta c_xx - lambda c - alpha rho c / (gamma + c)

     (the terms and signs above are read off from the residual assembled in IFunction() below)

     Further discussion on Page 134 and in 2d on Page 409
*/
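
/*
   Example run (options taken from the TEST block at the bottom of this file):

       ./ex4 -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
*/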

/*

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices                petscis.h  - index sets
     petscksp.h    - Krylov subspace methods petscpc.h  - preconditioners
     petscviewer.h - viewers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar rho, c;
} Field;

typedef struct {
  PetscScalar epsilon, delta, alpha, beta, gamma, kappa, lambda, mu, cstar;
  PetscBool   upwind;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS, PetscReal, Vec, Vec, Vec, void *), InitialConditions(DM, Vec);

int main(int argc, char **argv) {
  TS     ts; /* timestepping solver */
  Vec    U;  /* solution vector */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-delta", &appctx.delta, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-upwind", &appctx.upwind, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 8, 2, 1, NULL, &da)); /* 8 grid points, 2 dof (rho, c), stencil width 1 */
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  /* name the two fields; these labels are used by viewers and monitors */
  PetscCall(DMDASetFieldName(da, 0, "rho"));
  PetscCall(DMDASetFieldName(da, 1, "c"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract the global solution vector from the DMDA
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSROSW)); /* Rosenbrock-W: linearly implicit, suited to stiff problems */
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
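  /* no IJacobian is supplied, so PETSc assembles the Jacobian automatically by
     finite differencing with coloring, using the sparsity pattern from the DMDA */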

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, U));
  PetscCall(TSSetSolution(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetTimeStep(ts, .0001)); /* initial step size; the adaptive controller may adjust it */
  PetscCall(TSSetMaxTime(ts, 1.0));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&U));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit residual F(t,U,Udot)

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode IFunction(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr) {
  AppCtx     *appctx = (AppCtx *)ptr;
  DM          da;
  PetscInt    i, Mx, xs, xm;
  PetscReal   hx, sx;
  PetscScalar rho, c, rhoxx, cxx, cx, rhox, kcxrhox;
  Field      *u, *f, *udot;
  Vec         localU;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)(Mx - 1); /* uniform grid spacing on [0,1] */
  sx = 1.0 / (hx * hx);           /* 1/h^2 scaling for second differences */

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));
  PetscCall(DMDAVecGetArrayWrite(da, F, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

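  /*
     Dirichlet boundary points: the initial conditions set rho(0)=0, c(0)=1,
     rho(1)=1, c(1)=0, and the residual pins their time derivatives to zero
     so these values stay fixed
  */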
  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c;   /* u[0].c   - 1.0; */
    xs++;
    xm--;
  }
  if (xs + xm == Mx) {
    f[Mx - 1].rho = udot[Mx - 1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx - 1].c   = udot[Mx - 1].c;   /* u[Mx-1].c   - 0.0;  */
    xm--;
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (i = xs; i < xs + xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0 * rho + u[i - 1].rho + u[i + 1].rho) * sx;
    c     = u[i].c;
    cxx   = (-2.0 * c + u[i - 1].c + u[i + 1].c) * sx;

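    /*
       Chemotaxis term kappa*(rho*c_x)_x: either expanded by the product rule
       as kappa*(rho*c_xx + rho_x*c_x) with centered differences, or kept in
       conservative form with one-sided differences of the flux (upwind)
    */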
    if (!appctx->upwind) {
      rhox    = .5 * (u[i + 1].rho - u[i - 1].rho) / hx;
      cx      = .5 * (u[i + 1].c - u[i - 1].c) / hx;
      kcxrhox = appctx->kappa * (cxx * rho + cx * rhox);
    } else {
      kcxrhox = appctx->kappa * ((u[i + 1].c - u[i].c) * u[i + 1].rho - (u[i].c - u[i - 1].c) * u[i].rho) * sx;
    }

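    /* implicit residual: udot minus the right-hand side of each PDE */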
    f[i].rho = udot[i].rho - appctx->epsilon * rhoxx + kcxrhox - appctx->mu * PetscAbsScalar(rho) * (1.0 - rho) * PetscMax(0, PetscRealPart(c - appctx->cstar)) + appctx->beta * rho;
    f[i].c   = udot[i].c - appctx->delta * cxx + appctx->lambda * c + appctx->alpha * rho * c / (appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));
  PetscCall(DMDAVecRestoreArrayWrite(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U) {
  PetscInt  i, xs, xm, Mx;
  Field    *u;
  PetscReal hx, x;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)(Mx - 1);

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayWrite(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

  /*
     Compute initial conditions over the locally owned part of the grid:
     rho is 0 everywhere except 1 at the right boundary; c is a cosine
     profile decreasing from 1 at x = 0 to 0 at x = 1
  */
  for (i = xs; i < xs + xm; i++) {
    x = i * hx;
    if (i < Mx - 1) u[i].rho = 0.0;
    else u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5 * PETSC_PI * x);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayWrite(da, U, &u));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
      requires: double

   test:
      suffix: 2
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time 1
      requires: x double
      output_file: output/ex4_1.out

TEST*/