xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex3.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
2 static char help[] ="Model Equations for Advection-Diffusion\n";
3 
4 /*
5     Page 9, Section 1.2 Model Equations for Advection-Diffusion
6 
7           u_t = a u_x + d u_xx
8 
   The initial conditions used here are different than those in the book.
10 
11 */
12 
13 /*
14      Helpful runtime linear solver options:
15            -pc_type mg -da_refine 2 -snes_monitor -ksp_monitor -ts_view   (geometric multigrid with three levels)
16 
17 */
18 
19 /*
20    Include "petscts.h" so that we can use TS solvers.  Note that this file
21    automatically includes:
22      petscsys.h       - base PETSc routines   petscvec.h  - vectors
23      petscmat.h  - matrices
24      petscis.h     - index sets            petscksp.h  - Krylov subspace methods
25      petscviewer.h - viewers               petscpc.h   - preconditioners
26      petscksp.h   - linear solvers        petscsnes.h - nonlinear solvers
27 */
28 
29 #include <petscts.h>
30 #include <petscdm.h>
31 #include <petscdmda.h>
32 
33 /*
34    User-defined application context - contains data needed by the
35    application-provided call-back routines.
36 */
37 typedef struct {
38   PetscScalar a,d;   /* advection and diffusion strength */
39   PetscBool   upwind;
40 } AppCtx;
41 
42 /*
43    User-defined routines
44 */
45 extern PetscErrorCode InitialConditions(TS,Vec,AppCtx*);
46 extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
47 extern PetscErrorCode Solution(TS,PetscReal,Vec,AppCtx*);
48 
49 int main(int argc,char **argv)
50 {
51   AppCtx         appctx;                 /* user-defined application context */
52   TS             ts;                     /* timestepping context */
53   Vec            U;                      /* approximate solution vector */
54   PetscReal      dt;
55   DM             da;
56   PetscInt       M;
57 
58   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
59      Initialize program and set problem parameters
60      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
61 
62   PetscFunctionBeginUser;
63   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
64   appctx.a      = 1.0;
65   appctx.d      = 0.0;
66   PetscCall(PetscOptionsGetScalar(NULL,NULL,"-a",&appctx.a,NULL));
67   PetscCall(PetscOptionsGetScalar(NULL,NULL,"-d",&appctx.d,NULL));
68   appctx.upwind = PETSC_TRUE;
69   PetscCall(PetscOptionsGetBool(NULL,NULL,"-upwind",&appctx.upwind,NULL));
70 
71   PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC, 60, 1, 1,NULL,&da));
72   PetscCall(DMSetFromOptions(da));
73   PetscCall(DMSetUp(da));
74   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
75      Create vector data structures
76      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
77 
78   /*
79      Create vector data structures for approximate and exact solutions
80   */
81   PetscCall(DMCreateGlobalVector(da,&U));
82 
83   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
84      Create timestepping solver context
85      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
86 
87   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
88   PetscCall(TSSetDM(ts,da));
89 
90   /*
91       For linear problems with a time-dependent f(U,t) in the equation
92      u_t = f(u,t), the user provides the discretized right-hand-side
93       as a time-dependent matrix.
94   */
95   PetscCall(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
96   PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSMatrixHeat,&appctx));
97   PetscCall(TSSetSolutionFunction(ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void*))Solution,&appctx));
98 
99   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
100      Customize timestepping solver:
101        - Set timestepping duration info
102      Then set runtime options, which can override these defaults.
103      For example,
104           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
105      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
106      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
107 
108   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&M,0,0,0,0,0,0,0,0,0,0,0));
109   dt   = .48/(M*M);
110   PetscCall(TSSetTimeStep(ts,dt));
111   PetscCall(TSSetMaxSteps(ts,1000));
112   PetscCall(TSSetMaxTime(ts,100.0));
113   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
114   PetscCall(TSSetType(ts,TSARKIMEX));
115   PetscCall(TSSetFromOptions(ts));
116 
117   /*
118      Evaluate initial conditions
119   */
120   PetscCall(InitialConditions(ts,U,&appctx));
121 
122   /*
123      Run the timestepping solver
124   */
125   PetscCall(TSSolve(ts,U));
126 
127   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
128      Free work space.  All PETSc objects should be destroyed when they
129      are no longer needed.
130      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
131 
132   PetscCall(TSDestroy(&ts));
133   PetscCall(VecDestroy(&U));
134   PetscCall(DMDestroy(&da));
135 
136   /*
137      Always call PetscFinalize() before exiting a program.  This routine
138        - finalizes the PETSc libraries as well as MPI
139        - provides summary and diagnostic information if certain runtime
140          options are chosen (e.g., -log_view).
141   */
142   PetscCall(PetscFinalize());
143   return 0;
144 }
145 /* --------------------------------------------------------------------- */
146 /*
147    InitialConditions - Computes the solution at the initial time.
148 
149    Input Parameter:
150    u - uninitialized solution vector (global)
151    appctx - user-defined application context
152 
153    Output Parameter:
154    u - vector with solution at initial time (global)
155 */
156 PetscErrorCode InitialConditions(TS ts,Vec U,AppCtx *appctx)
157 {
158   PetscScalar    *u,h;
159   PetscInt       i,mstart,mend,xm,M;
160   DM             da;
161 
162   PetscCall(TSGetDM(ts,&da));
163   PetscCall(DMDAGetCorners(da,&mstart,0,0,&xm,0,0));
164   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&M,0,0,0,0,0,0,0,0,0,0,0));
165   h    = 1.0/M;
166   mend = mstart + xm;
167   /*
168     Get a pointer to vector data.
169     - For default PETSc vectors, VecGetArray() returns a pointer to
170       the data array.  Otherwise, the routine is implementation dependent.
171     - You MUST call VecRestoreArray() when you no longer need access to
172       the array.
173     - Note that the Fortran interface to VecGetArray() differs from the
174       C version.  See the users manual for details.
175   */
176   PetscCall(DMDAVecGetArray(da,U,&u));
177 
178   /*
179      We initialize the solution array by simply writing the solution
180      directly into the array locations.  Alternatively, we could use
181      VecSetValues() or VecSetValuesLocal().
182   */
183   for (i=mstart; i<mend; i++) u[i] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
184 
185   /*
186      Restore vector
187   */
188   PetscCall(DMDAVecRestoreArray(da,U,&u));
189   return 0;
190 }
191 /* --------------------------------------------------------------------- */
192 /*
193    Solution - Computes the exact solution at a given time.
194 
195    Input Parameters:
196    t - current time
197    solution - vector in which exact solution will be computed
198    appctx - user-defined application context
199 
200    Output Parameter:
201    solution - vector with the newly computed exact solution
202 */
203 PetscErrorCode Solution(TS ts,PetscReal t,Vec U,AppCtx *appctx)
204 {
205   PetscScalar    *u,ex1,ex2,sc1,sc2,h;
206   PetscInt       i,mstart,mend,xm,M;
207   DM             da;
208 
209   PetscCall(TSGetDM(ts,&da));
210   PetscCall(DMDAGetCorners(da,&mstart,0,0,&xm,0,0));
211   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&M,0,0,0,0,0,0,0,0,0,0,0));
212   h    = 1.0/M;
213   mend = mstart + xm;
214   /*
215      Get a pointer to vector data.
216   */
217   PetscCall(DMDAVecGetArray(da,U,&u));
218 
219   /*
220      Simply write the solution directly into the array locations.
221      Alternatively, we culd use VecSetValues() or VecSetValuesLocal().
222   */
223   ex1 = PetscExpScalar(-36.*PETSC_PI*PETSC_PI*appctx->d*t);
224   ex2 = PetscExpScalar(-4.*PETSC_PI*PETSC_PI*appctx->d*t);
225   sc1 = PETSC_PI*6.*h;                 sc2 = PETSC_PI*2.*h;
226   for (i=mstart; i<mend; i++) u[i] = PetscSinScalar(sc1*(PetscReal)i + appctx->a*PETSC_PI*6.*t)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i + appctx->a*PETSC_PI*2.*t)*ex2;
227 
228   /*
229      Restore vector
230   */
231   PetscCall(DMDAVecRestoreArray(da,U,&u));
232   return 0;
233 }
234 
235 /* --------------------------------------------------------------------- */
236 /*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the advection-diffusion equation.
239 
240    Input Parameters:
241    ts - the TS context
242    t - current time
243    global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()
245 
246    Output Parameters:
247    AA - Jacobian matrix
248    BB - optionally different preconditioning matrix
249    str - flag indicating matrix structure
250 
251    Notes:
252    Recall that MatSetValues() uses 0-based row and column numbers
253    in Fortran as well as in C.
254 */
255 PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec U,Mat AA,Mat BB,void *ctx)
256 {
257   Mat            A       = AA;                /* Jacobian matrix */
258   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
259   PetscInt       mstart, mend;
260   PetscInt       i,idx[3],M,xm;
261   PetscScalar    v[3],h;
262   DM             da;
263 
264   PetscCall(TSGetDM(ts,&da));
265   PetscCall(DMDAGetInfo(da,0,&M,0,0,0,0,0,0,0,0,0,0,0));
266   PetscCall(DMDAGetCorners(da,&mstart,0,0,&xm,0,0));
267   h    = 1.0/M;
268   mend = mstart + xm;
269   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
270      Compute entries for the locally owned part of the matrix
271      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
272   /*
273      Set matrix rows corresponding to boundary data
274   */
275 
276   /* diffusion */
277   v[0] = appctx->d/(h*h);
278   v[1] = -2.0*appctx->d/(h*h);
279   v[2] = appctx->d/(h*h);
280   if (!mstart) {
281     idx[0] = M-1; idx[1] = 0; idx[2] = 1;
282     PetscCall(MatSetValues(A,1,&mstart,3,idx,v,INSERT_VALUES));
283     mstart++;
284   }
285 
286   if (mend == M) {
287     mend--;
288     idx[0] = M-2; idx[1] = M-1; idx[2] = 0;
289     PetscCall(MatSetValues(A,1,&mend,3,idx,v,INSERT_VALUES));
290   }
291 
292   /*
293      Set matrix rows corresponding to interior data.  We construct the
294      matrix one row at a time.
295   */
296   for (i=mstart; i<mend; i++) {
297     idx[0] = i-1; idx[1] = i; idx[2] = i+1;
298     PetscCall(MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES));
299   }
300   PetscCall(MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY));
301   PetscCall(MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY));
302 
303   PetscCall(DMDAGetCorners(da,&mstart,0,0,&xm,0,0));
304   mend = mstart + xm;
305   if (!appctx->upwind) {
306     /* advection -- centered differencing */
307     v[0] = -.5*appctx->a/(h);
308     v[1] = .5*appctx->a/(h);
309     if (!mstart) {
310       idx[0] = M-1; idx[1] = 1;
311       PetscCall(MatSetValues(A,1,&mstart,2,idx,v,ADD_VALUES));
312       mstart++;
313     }
314 
315     if (mend == M) {
316       mend--;
317       idx[0] = M-2; idx[1] = 0;
318       PetscCall(MatSetValues(A,1,&mend,2,idx,v,ADD_VALUES));
319     }
320 
321     for (i=mstart; i<mend; i++) {
322       idx[0] = i-1; idx[1] = i+1;
323       PetscCall(MatSetValues(A,1,&i,2,idx,v,ADD_VALUES));
324     }
325   } else {
326     /* advection -- upwinding */
327     v[0] = -appctx->a/(h);
328     v[1] = appctx->a/(h);
329     if (!mstart) {
330       idx[0] = 0; idx[1] = 1;
331       PetscCall(MatSetValues(A,1,&mstart,2,idx,v,ADD_VALUES));
332       mstart++;
333     }
334 
335     if (mend == M) {
336       mend--;
337       idx[0] = M-1; idx[1] = 0;
338       PetscCall(MatSetValues(A,1,&mend,2,idx,v,ADD_VALUES));
339     }
340 
341     for (i=mstart; i<mend; i++) {
342       idx[0] = i; idx[1] = i+1;
343       PetscCall(MatSetValues(A,1,&i,2,idx,v,ADD_VALUES));
344     }
345   }
346 
347   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
348      Complete the matrix assembly process and set some options
349      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
350   /*
351      Assemble matrix, using the 2-step process:
352        MatAssemblyBegin(), MatAssemblyEnd()
353      Computations can be done while messages are in transition
354      by placing code between these two statements.
355   */
356   PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
357   PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
358 
359   /*
360      Set and option to indicate that we will never add a new nonzero location
361      to the matrix. If we do, it will generate an error.
362   */
363   PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
364   return 0;
365 }
366 
367 /*TEST
368 
369    test:
370       args: -pc_type mg -da_refine 2  -ts_view  -ts_monitor -ts_max_time .3 -mg_levels_ksp_max_it 3
371       requires: double
372       filter: grep -v "total number of"
373 
374    test:
375       suffix: 2
376       args:  -pc_type mg -da_refine 2  -ts_view  -ts_monitor_draw_solution -ts_monitor -ts_max_time .3 -mg_levels_ksp_max_it 3
377       requires: x
378       output_file: output/ex3_1.out
379       requires: double
380       filter: grep -v "total number of"
381 
382 TEST*/
383