static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>
   (a concrete sample invocation is given after this comment block).

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */
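
/*
   For example, an illustrative parallel run (the options shown are all
   standard PETSc/TS runtime options) is
       mpiexec -n 2 ./ex4 -ts_type beuler -ts_max_steps 50 -m 120 -nox
*/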

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h    - vectors
     petscmat.h    - matrices                 petscis.h     - index sets
     petscksp.h    - Krylov subspace methods  petscviewer.h - viewers
     petscpc.h     - preconditioners          petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;             /* communicator */
  DM          da;               /* distributed array data structure */
  Vec         localwork;        /* local ghosted work vector */
  Vec         u_local;          /* local ghosted approximate solution vector */
  Vec         solution;         /* global exact solution vector */
  PetscInt    m;                /* total number of grid points */
  PetscReal   h;                /* mesh width h = 1/(m-1) */
  PetscBool   debug;            /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1, viewer2; /* viewers for the solution and error */
  PetscReal   norm_2, norm_max; /* error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSMatrixHeat(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSFunctionHeat(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx        appctx;               /* user-defined application context */
  TS            ts;                   /* timestepping context */
  Mat           A;                    /* matrix data structure */
  Vec           u;                    /* approximate solution vector */
  PetscReal     time_total_max = 1.0; /* default max total time */
  PetscInt      time_steps_max = 100; /* default max timesteps */
  PetscDraw     draw;                 /* drawing context */
  PetscInt      steps, m;
  PetscMPIInt   size;
  PetscReal     dt, ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  appctx.m        = m;
  appctx.h        = 1.0 / (m - 1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Solving a linear TS problem, number of processors = %d\n", size));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 380, 400, 160, &appctx.viewer1));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer1, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 0, 400, 160, &appctx.viewer2));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer2, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nonlinear", &flg, NULL));
  tsproblem = flg ? TS_NONLINEAR : TS_LINEAR; /* record the choice so the -nonlinear branch below is taken */
  PetscCall(TSSetProblemType(ts, tsproblem));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m, m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-time_dependent_rhs", &flg, NULL));
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, RHSMatrixHeat, &appctx));
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand-side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    PetscCall(RHSMatrixHeat(ts, 0.0, u, A, A, &appctx));
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, TSComputeRHSJacobianConstant, &appctx));
  }

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionHeat, &appctx));
    PetscCall(TSGetSNES(ts, &snes));
    PetscCall(SNESSetJacobian(snes, NULL, NULL, SNESComputeJacobianDefault, NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

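  /*
     The initial step size h^2/2 is the classical stability limit of the
     explicit (forward) Euler method for this centered-difference heat
     operator; implicit integrators selected at runtime may take larger steps.
  */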
  dt = appctx.h * appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetSolution(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Total timesteps %" PetscInt_FMT ", Final time %g\n", steps, (double)ftime));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Avg. error (2 norm) = %g Avg. error (max norm) = %g\n", (double)(appctx.norm_2 / steps), (double)(appctx.norm_max / steps)));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&u));
  PetscCall(PetscViewerDestroy(&appctx.viewer1));
  PetscCall(PetscViewerDestroy(&appctx.viewer2));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(DMDestroy(&appctx.da));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) u_localptr[i - mybase] = PetscSinScalar(PETSC_PI * i * 6. * h) + 3. * PetscSinScalar(PETSC_PI * i * 2. * h);

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, ex1, ex2, sc1, sc2;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = PetscExpReal(-36. * PETSC_PI * PETSC_PI * t);
  ex2 = PetscExpReal(-4. * PETSC_PI * PETSC_PI * t);
  sc1 = PETSC_PI * 6. * h;
  sc2 = PETSC_PI * 2. * h;
  for (i = mybase; i < myend; i++) s_localptr[i - mybase] = PetscSinScalar(sc1 * (PetscReal)i) * ex1 + 3. * PetscSinScalar(sc2 * (PetscReal)i) * ex2;

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
             initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal norm_2, norm_max;

  PetscFunctionBeginUser;
  /*
     View a graph of the current iterate
  */
  PetscCall(VecView(u, appctx->viewer2));

  /*
     Compute the exact solution
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &norm_2));
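  /* scale by sqrt(h) so that the discrete 2-norm approximates the continuous L2 norm on [0,1] */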
  norm_2 = PetscSqrtReal(appctx->h) * norm_2;
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &norm_max));
  if (norm_2 < 1e-14) norm_2 = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)norm_2, (double)norm_max));
  appctx->norm_2 += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  PetscCall(VecView(appctx->solution, appctx->viewer1));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context
   t - current time
   X - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts, PetscReal t, Vec X, Mat AA, Mat BB, void *ctx)
{
  Mat         A      = AA;            /* Jacobian matrix */
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, mstart, mend, idx[3];
  PetscScalar v[3], stwo = -2. / (appctx->h * appctx->h), sone = -.5 * stwo;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatGetOwnershipRange(A, &mstart, &mend));

  /*
     Set matrix rows corresponding to boundary data
  */

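  /*
     Rows 0 and m-1 receive only a 1 on the diagonal; because the initial data
     vanishes at x = 0 and x = 1, this keeps the homogeneous Dirichlet values
     u(t,0) = u(t,1) = 0 fixed throughout the time integration.
  */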
  if (mstart == 0) { /* first processor only */
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
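  /*
     Each interior row i holds the centered second-difference stencil
         (u_{i-1} - 2*u_{i} + u_{i+1}) / h^2,
     i.e. the row entries are [sone  stwo  sone] with sone = 1/h^2 and
     stwo = -2/h^2.
  */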
  v[0] = sone;
  v[1] = stwo;
  v[2] = sone;
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}

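/* --------------------------------------------------------------------- */
/*
   RHSFunctionHeat - Nonlinear-form right-hand-side evaluation used with the
   -nonlinear option: it reassembles the heat-operator matrix at time t and
   applies it to the input vector, so that f(t,u) = A(t) u.
*/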
PetscErrorCode RHSFunctionHeat(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  Mat A;

  PetscFunctionBeginUser;
  PetscCall(TSGetRHSJacobian(ts, &A, NULL, NULL, &ctx));
  PetscCall(RHSMatrixHeat(ts, t, globalin, A, NULL, ctx));
  /* PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
  PetscCall(MatMult(A, globalin, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/