
static char help[] ="Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";

/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
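   so that each interior grid value u_i satisfies the ODE
       du_i/dt = (u_{i+1} - 2u_{i} + u_{i-1})/h^2,
   while the boundary conditions hold u_0 and u_{m-1} fixed at zero.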
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)
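   Each mode sin(k*pi*x) of the initial data decays independently as
   exp(-k^2*pi^2*t) under the heat equation, which gives the two terms above.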

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

    The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h    - vectors
     petscmat.h    - matrices                 petscis.h     - index sets
     petscksp.h    - Krylov subspace methods  petscviewer.h - viewers
     petscpc.h     - preconditioners          petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;              /* communicator */
  DM          da;                /* distributed array data structure */
  Vec         localwork;         /* local ghosted work vector */
  Vec         u_local;           /* local ghosted approximate solution vector */
  Vec         solution;          /* global exact solution vector */
  PetscInt    m;                 /* total number of grid points */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscBool   debug;             /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2;   /* viewers for the solution and error */
  PetscReal   norm_2,norm_max;   /* error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscReal      time_total_max = 1.0;   /* default max total time */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscDraw      draw;                   /* drawing context */
  PetscInt       steps,m;
  PetscMPIInt    size;
  PetscReal      dt,ftime;
  PetscBool      flg;
  TSProblemType  tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(PetscInitialize(&argc,&argv,(char*)0,help));
  appctx.comm = PETSC_COMM_WORLD;

  m               = 60;
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are m
     total grid values spread equally among all the processors.
  */

  CHKERRQ(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,1,1,NULL,&appctx.da));
  CHKERRQ(DMSetFromOptions(appctx.da));
  CHKERRQ(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  CHKERRQ(DMCreateGlobalVector(appctx.da,&u));
  CHKERRQ(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  CHKERRQ(VecDuplicate(appctx.u_local,&appctx.localwork));
  CHKERRQ(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1));
  CHKERRQ(PetscViewerDrawGetDraw(appctx.viewer1,0,&draw));
  CHKERRQ(PetscDrawSetDoubleBuffer(draw));
  CHKERRQ(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2));
  CHKERRQ(PetscViewerDrawGetDraw(appctx.viewer2,0,&draw));
  CHKERRQ(PetscDrawSetDoubleBuffer(draw));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSCreate(PETSC_COMM_WORLD,&ts));

  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-nonlinear",&flg,NULL));
  if (flg) tsproblem = TS_NONLINEAR;
  CHKERRQ(TSSetProblemType(ts,tsproblem));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(TSMonitorSet(ts,Monitor,&appctx,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m));
  CHKERRQ(MatSetFromOptions(A));
  CHKERRQ(MatSetUp(A));

  flg  = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-time_dependent_rhs",&flg,NULL));
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand side
       as a time-dependent matrix.
    */
    CHKERRQ(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
    CHKERRQ(TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx));
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand side
       as a matrix only once, and then sets the special Jacobian evaluation
       routine TSComputeRHSJacobianConstant.
    */
    CHKERRQ(RHSMatrixHeat(ts,0.0,u,A,A,&appctx));
    CHKERRQ(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
    CHKERRQ(TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx));
  }

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    CHKERRQ(TSSetRHSFunction(ts,NULL,RHSFunctionHeat,&appctx));
    CHKERRQ(TSGetSNES(ts,&snes));
    CHKERRQ(SNESSetJacobian(snes,NULL,NULL,SNESComputeJacobianDefault,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

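  /*
     Note: h^2/2 is the classical stability limit for the explicit (forward)
     Euler method applied to u_t = u_xx with this centered-difference
     discretization; implicit methods selected via -ts_type can take larger
     steps, and the value can be overridden at runtime with -ts_dt.
  */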
  dt   = appctx.h*appctx.h/2.0;
  CHKERRQ(TSSetTimeStep(ts,dt));
  CHKERRQ(TSSetSolution(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSSetMaxSteps(ts,time_steps_max));
  CHKERRQ(TSSetMaxTime(ts,time_total_max));
  CHKERRQ(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  CHKERRQ(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  CHKERRQ(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  CHKERRQ(TSSolve(ts,u));
  CHKERRQ(TSGetSolveTime(ts,&ftime));
  CHKERRQ(TSGetStepNumber(ts,&steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"Total timesteps %D, Final time %g\n",steps,(double)ftime));
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"Avg. error (2 norm) = %g Avg. error (max norm) = %g\n",(double)(appctx.norm_2/steps),(double)(appctx.norm_max/steps)));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSDestroy(&ts));
  CHKERRQ(MatDestroy(&A));
  CHKERRQ(VecDestroy(&u));
  CHKERRQ(PetscViewerDestroy(&appctx.viewer1));
  CHKERRQ(PetscViewerDestroy(&appctx.viewer2));
  CHKERRQ(VecDestroy(&appctx.localwork));
  CHKERRQ(VecDestroy(&appctx.solution));
  CHKERRQ(VecDestroy(&appctx.u_local));
  CHKERRQ(DMDestroy(&appctx.da));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  CHKERRQ(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  CHKERRQ(VecGetOwnershipRange(u,&mybase,&myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  CHKERRQ(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
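  /* the array is indexed from 0 on each process, so global grid point i
     lives in slot i-mybase of the local array */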
  for (i=mybase; i<myend; i++) u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"initial guess vector\n"));
    CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  CHKERRQ(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  CHKERRQ(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
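  /* temporal decay factors exp(-k^2*pi^2*t) and spatial frequencies k*pi*h
     for the two modes k = 6 and k = 2 of the initial data */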
  ex1 = PetscExpReal(-36.*PETSC_PI*PETSC_PI*t); ex2 = PetscExpReal(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                          sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      norm_2,norm_max;

  /*
     View a graph of the current iterate
  */
  CHKERRQ(VecView(u,appctx->viewer2));

  /*
     Compute the exact solution
  */
  CHKERRQ(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  CHKERRQ(VecAXPY(appctx->solution,-1.0,u));
  CHKERRQ(VecNorm(appctx->solution,NORM_2,&norm_2));
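  /* scale by sqrt(h) so the discrete 2-norm approximates the continuous L2 norm */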
  norm_2 = PetscSqrtReal(appctx->h)*norm_2;
  CHKERRQ(VecNorm(appctx->solution,NORM_MAX,&norm_max));
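  /* round off tiny errors so the printed output does not depend on roundoff */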
  if (norm_2   < 1e-14) norm_2   = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  CHKERRQ(PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)norm_2,(double)norm_max));
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  CHKERRQ(VecView(appctx->solution,appctx->viewer1));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"Error vector\n"));
    CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context
   t - current time
   X - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix used to construct the preconditioner

  Notes:
  RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat AA,Mat BB,void *ctx)
{
  Mat            A       = AA;               /* Jacobian matrix */
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt       i,mstart,mend,idx[3];
  PetscScalar    v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;
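  /* interior stencil of the discrete Laplacian: sone = 1/h^2 off the diagonal
     and stwo = -2/h^2 on the diagonal, i.e., rows of (1, -2, 1)/h^2 */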

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(MatGetOwnershipRange(A,&mstart,&mend));

  /*
     Set matrix rows corresponding to boundary data
  */

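  /* each boundary row is set to the identity, so du/dt = u there; since the
     boundary values start at zero they remain zero for all time */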
  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    CHKERRQ(MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    CHKERRQ(MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    CHKERRQ(MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  CHKERRQ(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  CHKERRQ(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

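/*
   RHSFunctionHeat - Right-hand-side function used for the -nonlinear runs:
   it reassembles the (here actually linear) operator via RHSMatrixHeat and
   applies it, f(u) = A u, while the Jacobian is approximated by
   SNESComputeJacobianDefault as set in main().
*/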
PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  Mat            A;

  PetscFunctionBeginUser;
  CHKERRQ(TSGetRHSJacobian(ts,&A,NULL,NULL,&ctx));
  CHKERRQ(RHSMatrixHeat(ts,t,globalin,A,NULL,ctx));
  /* CHKERRQ(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
  CHKERRQ(MatMult(A,globalin,globalout));
  PetscFunctionReturn(0);
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/