xref: /petsc/src/ts/tutorials/ex6.c (revision b8abcfde4cf799610cd89775278ac4145d1798ce)
1 
2 static char help[] ="Solves a simple time-dependent linear PDE (the heat equation).\n\
3 Input parameters include:\n\
4   -m <points>, where <points> = number of grid points\n\
5   -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
6   -debug              : Activate debugging printouts\n\
7   -nox                : Deactivate x-window graphics\n\n";
8 
9 /*
10    Concepts: TS^time-dependent linear problems
11    Concepts: TS^heat equation
12    Concepts: TS^diffusion equation
13    Routines: TSCreate(); TSSetSolution(); TSSetRHSJacobian(), TSSetIJacobian();
14    Routines: TSSetTimeStep(); TSSetMaxTime(); TSMonitorSet();
15    Routines: TSSetFromOptions(); TSStep(); TSDestroy();
16    Routines: TSSetTimeStep(); TSGetTimeStep();
17    Processors: 1
18 */
19 
20 /* ------------------------------------------------------------------------
21 
22    This program solves the one-dimensional heat equation (also called the
23    diffusion equation),
24        u_t = u_xx,
25    on the domain 0 <= x <= 1, with the boundary conditions
26        u(t,0) = 0, u(t,1) = 0,
27    and the initial condition
28        u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
29    This is a linear, second-order, parabolic equation.
30 
31    We discretize the right-hand side using finite differences with
32    uniform grid spacing h:
33        u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
34    We then demonstrate time evolution using the various TS methods by
35    running the program via
36        ex6 -ts_type <timestepping solver>
37 
38    We compare the approximate solution with the exact solution, given by
39        u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
40                       3*exp(-4*pi*pi*t) * sin(2*pi*x)
41 
42    Notes:
43    This code demonstrates the TS solver interface to two variants of
44    linear problems, u_t = f(u,t), namely
45      - time-dependent f:   f(u,t) is a function of t
46      - time-independent f: f(u,t) is simply f(u)
47 
48     The parallel version of this code is ts/tutorials/ex4.c
49 
50   ------------------------------------------------------------------------- */
51 
52 /*
53    Include "ts.h" so that we can use TS solvers.  Note that this file
54    automatically includes:
55      petscsys.h  - base PETSc routines   vec.h  - vectors
56      sys.h    - system routines       mat.h  - matrices
57      is.h     - index sets            ksp.h  - Krylov subspace methods
58      viewer.h - viewers               pc.h   - preconditioners
59      snes.h - nonlinear solvers
60 */
61 
62 #include <petscts.h>
63 #include <petscdraw.h>
64 
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines (Monitor, RHSMatrixHeat, etc.).
   A pointer to one instance is threaded through TSMonitorSet() and
   TSSetRHSJacobian() as the opaque user context.
*/
typedef struct {
  Vec         solution;          /* global exact solution vector, recomputed each step by ExactSolution() */
  PetscInt    m;                 /* total number of grid points (including both boundary points) */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscBool   debug;             /* flag (PETSC_TRUE activates debugging printouts) */
  PetscViewer viewer1, viewer2;  /* draw viewers: viewer1 shows the error, viewer2 the solution */
  PetscReal   norm_2, norm_max;  /* running sums of per-step error norms, averaged in main() */
} AppCtx;
77 
78 /*
79    User-defined routines
80 */
81 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
82 extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
83 extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
84 extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
85 extern PetscErrorCode MyBCRoutine(TS,PetscReal,Vec,void*);
86 
87 int main(int argc,char **argv)
88 {
89   AppCtx         appctx;                 /* user-defined application context */
90   TS             ts;                     /* timestepping context */
91   Mat            A;                      /* matrix data structure */
92   Vec            u;                      /* approximate solution vector */
93   PetscReal      time_total_max = 100.0; /* default max total time */
94   PetscInt       time_steps_max = 100;   /* default max timesteps */
95   PetscDraw      draw;                   /* drawing context */
96   PetscInt       steps, m;
97   PetscMPIInt    size;
98   PetscReal      dt;
99   PetscReal      ftime;
100   PetscBool      flg;
101   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
102      Initialize program and set problem parameters
103      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
104 
105   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
106   MPI_Comm_size(PETSC_COMM_WORLD,&size);
107   PetscCheck(size == 1,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This is a uniprocessor example only!");
108 
109   m    = 60;
110   PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
111   PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
112 
113   appctx.m        = m;
114   appctx.h        = 1.0/(m-1.0);
115   appctx.norm_2   = 0.0;
116   appctx.norm_max = 0.0;
117 
118   PetscCall(PetscPrintf(PETSC_COMM_SELF,"Solving a linear TS problem on 1 processor\n"));
119 
120   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121      Create vector data structures
122      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123 
124   /*
125      Create vector data structures for approximate and exact solutions
126   */
127   PetscCall(VecCreateSeq(PETSC_COMM_SELF,m,&u));
128   PetscCall(VecDuplicate(u,&appctx.solution));
129 
130   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
131      Set up displays to show graphs of the solution and error
132      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
133 
134   PetscCall(PetscViewerDrawOpen(PETSC_COMM_SELF,0,"",80,380,400,160,&appctx.viewer1));
135   PetscCall(PetscViewerDrawGetDraw(appctx.viewer1,0,&draw));
136   PetscCall(PetscDrawSetDoubleBuffer(draw));
137   PetscCall(PetscViewerDrawOpen(PETSC_COMM_SELF,0,"",80,0,400,160,&appctx.viewer2));
138   PetscCall(PetscViewerDrawGetDraw(appctx.viewer2,0,&draw));
139   PetscCall(PetscDrawSetDoubleBuffer(draw));
140 
141   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
142      Create timestepping solver context
143      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
144 
145   PetscCall(TSCreate(PETSC_COMM_SELF,&ts));
146   PetscCall(TSSetProblemType(ts,TS_LINEAR));
147 
148   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149      Set optional user-defined monitoring routine
150      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
151 
152   PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
153 
154   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
155 
156      Create matrix data structure; set matrix evaluation routine.
157      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
158 
159   PetscCall(MatCreate(PETSC_COMM_SELF,&A));
160   PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m));
161   PetscCall(MatSetFromOptions(A));
162   PetscCall(MatSetUp(A));
163 
164   PetscCall(PetscOptionsHasName(NULL,NULL,"-time_dependent_rhs",&flg));
165   if (flg) {
166     /*
167        For linear problems with a time-dependent f(u,t) in the equation
168        u_t = f(u,t), the user provides the discretized right-hand-side
169        as a time-dependent matrix.
170     */
171     PetscCall(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
172     PetscCall(TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx));
173   } else {
174     /*
175        For linear problems with a time-independent f(u) in the equation
176        u_t = f(u), the user provides the discretized right-hand-side
177        as a matrix only once, and then sets a null matrix evaluation
178        routine.
179     */
180     PetscCall(RHSMatrixHeat(ts,0.0,u,A,A,&appctx));
181     PetscCall(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
182     PetscCall(TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx));
183   }
184 
185   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
186      Set solution vector and initial timestep
187      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
188 
189   dt   = appctx.h*appctx.h/2.0;
190   PetscCall(TSSetTimeStep(ts,dt));
191   PetscCall(TSSetSolution(ts,u));
192 
193   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
194      Customize timestepping solver:
195        - Set the solution method to be the Backward Euler method.
196        - Set timestepping duration info
197      Then set runtime options, which can override these defaults.
198      For example,
199           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
200      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
201      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
202 
203   PetscCall(TSSetMaxSteps(ts,time_steps_max));
204   PetscCall(TSSetMaxTime(ts,time_total_max));
205   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
206   PetscCall(TSSetFromOptions(ts));
207 
208   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
209      Solve the problem
210      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
211 
212   /*
213      Evaluate initial conditions
214   */
215   PetscCall(InitialConditions(u,&appctx));
216 
217   /*
218      Run the timestepping solver
219   */
220   PetscCall(TSSolve(ts,u));
221   PetscCall(TSGetSolveTime(ts,&ftime));
222   PetscCall(TSGetStepNumber(ts,&steps));
223 
224   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
225      View timestepping solver info
226      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
227 
228   PetscCall(PetscPrintf(PETSC_COMM_SELF,"avg. error (2 norm) = %g, avg. error (max norm) = %g\n",(double)(appctx.norm_2/steps),(double)(appctx.norm_max/steps)));
229   PetscCall(TSView(ts,PETSC_VIEWER_STDOUT_SELF));
230 
231   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
232      Free work space.  All PETSc objects should be destroyed when they
233      are no longer needed.
234      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
235 
236   PetscCall(TSDestroy(&ts));
237   PetscCall(MatDestroy(&A));
238   PetscCall(VecDestroy(&u));
239   PetscCall(PetscViewerDestroy(&appctx.viewer1));
240   PetscCall(PetscViewerDestroy(&appctx.viewer2));
241   PetscCall(VecDestroy(&appctx.solution));
242 
243   /*
244      Always call PetscFinalize() before exiting a program.  This routine
245        - finalizes the PETSc libraries as well as MPI
246        - provides summary and diagnostic information if certain runtime
247          options are chosen (e.g., -log_view).
248   */
249   PetscCall(PetscFinalize());
250   return 0;
251 }
252 /* --------------------------------------------------------------------- */
253 /*
254    InitialConditions - Computes the solution at the initial time.
255 
256    Input Parameter:
257    u - uninitialized solution vector (global)
258    appctx - user-defined application context
259 
260    Output Parameter:
261    u - vector with solution at initial time (global)
262 */
263 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
264 {
265   PetscScalar    *u_localptr;
266   PetscInt       i;
267 
268   /*
269     Get a pointer to vector data.
270     - For default PETSc vectors, VecGetArray() returns a pointer to
271       the data array.  Otherwise, the routine is implementation dependent.
272     - You MUST call VecRestoreArray() when you no longer need access to
273       the array.
274     - Note that the Fortran interface to VecGetArray() differs from the
275       C version.  See the users manual for details.
276   */
277   PetscCall(VecGetArray(u,&u_localptr));
278 
279   /*
280      We initialize the solution array by simply writing the solution
281      directly into the array locations.  Alternatively, we could use
282      VecSetValues() or VecSetValuesLocal().
283   */
284   for (i=0; i<appctx->m; i++) u_localptr[i] = PetscSinReal(PETSC_PI*i*6.*appctx->h) + 3.*PetscSinReal(PETSC_PI*i*2.*appctx->h);
285 
286   /*
287      Restore vector
288   */
289   PetscCall(VecRestoreArray(u,&u_localptr));
290 
291   /*
292      Print debugging information if desired
293   */
294   if (appctx->debug) {
295      PetscCall(VecView(u,PETSC_VIEWER_STDOUT_SELF));
296   }
297 
298   return 0;
299 }
300 /* --------------------------------------------------------------------- */
301 /*
302    ExactSolution - Computes the exact solution at a given time.
303 
304    Input Parameters:
305    t - current time
306    solution - vector in which exact solution will be computed
307    appctx - user-defined application context
308 
309    Output Parameter:
310    solution - vector with the newly computed exact solution
311 */
312 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
313 {
314   PetscScalar    *s_localptr, h = appctx->h, ex1, ex2, sc1, sc2;
315   PetscInt       i;
316 
317   /*
318      Get a pointer to vector data.
319   */
320   PetscCall(VecGetArray(solution,&s_localptr));
321 
322   /*
323      Simply write the solution directly into the array locations.
324      Alternatively, we culd use VecSetValues() or VecSetValuesLocal().
325   */
326   ex1 = PetscExpReal(-36.*PETSC_PI*PETSC_PI*t); ex2 = PetscExpReal(-4.*PETSC_PI*PETSC_PI*t);
327   sc1 = PETSC_PI*6.*h;                 sc2 = PETSC_PI*2.*h;
328   for (i=0; i<appctx->m; i++) s_localptr[i] = PetscSinReal(PetscRealPart(sc1)*(PetscReal)i)*ex1 + 3.*PetscSinReal(PetscRealPart(sc2)*(PetscReal)i)*ex2;
329 
330   /*
331      Restore vector
332   */
333   PetscCall(VecRestoreArray(solution,&s_localptr));
334   return 0;
335 }
336 /* --------------------------------------------------------------------- */
337 /*
338    Monitor - User-provided routine to monitor the solution computed at
339    each timestep.  This example plots the solution and computes the
340    error in two different norms.
341 
342    This example also demonstrates changing the timestep via TSSetTimeStep().
343 
344    Input Parameters:
345    ts     - the timestep context
346    step   - the count of the current step (with 0 meaning the
347              initial condition)
348    crtime  - the current time
349    u      - the solution at this timestep
350    ctx    - the user-provided context for this monitoring routine.
351             In this case we use the application context which contains
352             information about the problem size, workspace and the exact
353             solution.
354 */
355 PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal crtime,Vec u,void *ctx)
356 {
357   AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
358   PetscReal      norm_2, norm_max, dt, dttol;
359   PetscBool      flg;
360 
361   /*
362      View a graph of the current iterate
363   */
364   PetscCall(VecView(u,appctx->viewer2));
365 
366   /*
367      Compute the exact solution
368   */
369   PetscCall(ExactSolution(crtime,appctx->solution,appctx));
370 
371   /*
372      Print debugging information if desired
373   */
374   if (appctx->debug) {
375     PetscCall(PetscPrintf(PETSC_COMM_SELF,"Computed solution vector\n"));
376     PetscCall(VecView(u,PETSC_VIEWER_STDOUT_SELF));
377     PetscCall(PetscPrintf(PETSC_COMM_SELF,"Exact solution vector\n"));
378     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_SELF));
379   }
380 
381   /*
382      Compute the 2-norm and max-norm of the error
383   */
384   PetscCall(VecAXPY(appctx->solution,-1.0,u));
385   PetscCall(VecNorm(appctx->solution,NORM_2,&norm_2));
386   norm_2 = PetscSqrtReal(appctx->h)*norm_2;
387   PetscCall(VecNorm(appctx->solution,NORM_MAX,&norm_max));
388 
389   PetscCall(TSGetTimeStep(ts,&dt));
390   if (norm_2 > 1.e-2) {
391     PetscCall(PetscPrintf(PETSC_COMM_SELF,"Timestep %D: step size = %g, time = %g, 2-norm error = %g, max norm error = %g\n",step,(double)dt,(double)crtime,(double)norm_2,(double)norm_max));
392   }
393   appctx->norm_2   += norm_2;
394   appctx->norm_max += norm_max;
395 
396   dttol = .0001;
397   PetscCall(PetscOptionsGetReal(NULL,NULL,"-dttol",&dttol,&flg));
398   if (dt < dttol) {
399     dt  *= .999;
400     PetscCall(TSSetTimeStep(ts,dt));
401   }
402 
403   /*
404      View a graph of the error
405   */
406   PetscCall(VecView(appctx->solution,appctx->viewer1));
407 
408   /*
409      Print debugging information if desired
410   */
411   if (appctx->debug) {
412     PetscCall(PetscPrintf(PETSC_COMM_SELF,"Error vector\n"));
413     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_SELF));
414   }
415 
416   return 0;
417 }
418 /* --------------------------------------------------------------------- */
419 /*
420    RHSMatrixHeat - User-provided routine to compute the right-hand-side
421    matrix for the heat equation.
422 
423    Input Parameters:
424    ts - the TS context
425    t - current time
426    global_in - global input vector
427    dummy - optional user-defined context, as set by TSetRHSJacobian()
428 
429    Output Parameters:
430    AA - Jacobian matrix
431    BB - optionally different preconditioning matrix
432    str - flag indicating matrix structure
433 
434    Notes:
435    Recall that MatSetValues() uses 0-based row and column numbers
436    in Fortran as well as in C.
437 */
438 PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat AA,Mat BB,void *ctx)
439 {
440   Mat            A       = AA;                /* Jacobian matrix */
441   AppCtx         *appctx = (AppCtx*) ctx;      /* user-defined application context */
442   PetscInt       mstart  = 0;
443   PetscInt       mend    = appctx->m;
444   PetscInt       i, idx[3];
445   PetscScalar    v[3], stwo = -2./(appctx->h*appctx->h), sone = -.5*stwo;
446 
447   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
448      Compute entries for the locally owned part of the matrix
449      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
450   /*
451      Set matrix rows corresponding to boundary data
452   */
453 
454   mstart = 0;
455   v[0]   = 1.0;
456   PetscCall(MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES));
457   mstart++;
458 
459   mend--;
460   v[0] = 1.0;
461   PetscCall(MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES));
462 
463   /*
464      Set matrix rows corresponding to interior data.  We construct the
465      matrix one row at a time.
466   */
467   v[0] = sone; v[1] = stwo; v[2] = sone;
468   for (i=mstart; i<mend; i++) {
469     idx[0] = i-1; idx[1] = i; idx[2] = i+1;
470     PetscCall(MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES));
471   }
472 
473   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
474      Complete the matrix assembly process and set some options
475      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
476   /*
477      Assemble matrix, using the 2-step process:
478        MatAssemblyBegin(), MatAssemblyEnd()
479      Computations can be done while messages are in transition
480      by placing code between these two statements.
481   */
482   PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
483   PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
484 
485   /*
486      Set and option to indicate that we will never add a new nonzero location
487      to the matrix. If we do, it will generate an error.
488   */
489   PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
490 
491   return 0;
492 }
493 /* --------------------------------------------------------------------- */
494 /*
495    Input Parameters:
496    ts - the TS context
497    t - current time
498    f - function
499    ctx - optional user-defined context, as set by TSetBCFunction()
500  */
501 PetscErrorCode MyBCRoutine(TS ts,PetscReal t,Vec f,void *ctx)
502 {
503   AppCtx         *appctx = (AppCtx*) ctx;      /* user-defined application context */
504   PetscInt       m = appctx->m;
505   PetscScalar    *fa;
506 
507   PetscCall(VecGetArray(f,&fa));
508   fa[0]   = 0.0;
509   fa[m-1] = 1.0;
510   PetscCall(VecRestoreArray(f,&fa));
511   PetscCall(PetscPrintf(PETSC_COMM_SELF,"t=%g\n",(double)t));
512 
513   return 0;
514 }
515 
516 /*TEST
517 
518     test:
519       args: -nox -ts_max_steps 4
520 
521 TEST*/
522