static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)
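   Each Fourier mode sin(k*pi*x) of the initial data evolves independently
   under u_t = u_xx and decays as exp(-k^2*pi^2*t), which gives the two
   terms above.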

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines          petscvec.h - vectors
     petscmat.h    - matrices                     petscis.h  - index sets
     petscksp.h    - Krylov subspace methods (linear solvers)
     petscviewer.h - viewers                      petscpc.h  - preconditioners
     petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;             /* communicator */
  DM          da;               /* distributed array data structure */
  Vec         localwork;        /* local ghosted work vector */
  Vec         u_local;          /* local ghosted approximate solution vector */
  Vec         solution;         /* global exact solution vector */
  PetscInt    m;                /* total number of grid points */
  PetscReal   h;                /* mesh width h = 1/(m-1) */
  PetscBool   debug;            /* PETSC_TRUE activates debugging printouts */
  PetscViewer viewer1, viewer2; /* viewers for the solution and error */
  PetscReal   norm_2, norm_max; /* accumulated error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSMatrixHeat(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSFunctionHeat(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx        appctx;               /* user-defined application context */
  TS            ts;                   /* timestepping context */
  Mat           A;                    /* matrix data structure */
  Vec           u;                    /* approximate solution vector */
  PetscReal     time_total_max = 1.0; /* default max total time */
  PetscInt      time_steps_max = 100; /* default max timesteps */
  PetscDraw     draw;                 /* drawing context */
  PetscInt      steps, m;
  PetscMPIInt   size;
  PetscReal     dt, ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  appctx.m        = m;
  appctx.h        = 1.0 / (m - 1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Solving a linear TS problem, number of processors = %d\n", size));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are m
     total grid values spread equally among all the processors.
  */

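  /* one degree of freedom per grid point, with a ghost (stencil) width of one */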
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 380, 400, 160, &appctx.viewer1));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer1, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 0, 400, 160, &appctx.viewer2));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer2, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nonlinear", &flg, NULL));
  if (flg) tsproblem = TS_NONLINEAR; /* record the choice; the nonlinear setup below tests tsproblem */
  PetscCall(TSSetProblemType(ts, tsproblem));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m, m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-time_dependent_rhs", &flg, NULL));
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
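    /*
       TSComputeRHSFunctionLinear is a PETSc-provided routine that applies
       the current RHS Jacobian matrix to the solution vector, so no
       hand-written right-hand-side function is needed.
    */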
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, RHSMatrixHeat, &appctx));
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user assembles the discretized right-hand-side
       matrix once, and then sets TSComputeRHSJacobianConstant, which
       simply reuses that matrix at every step.
    */
    PetscCall(RHSMatrixHeat(ts, 0.0, u, A, A, &appctx));
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, TSComputeRHSJacobianConstant, &appctx));
  }

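  /*
     For the nonlinear formulation (-nonlinear), provide the right-hand-side
     function directly and let SNES build the Jacobian by finite differences
     of that function.
  */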
  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionHeat, &appctx));
    PetscCall(TSGetSNES(ts, &snes));
    PetscCall(SNESSetJacobian(snes, NULL, NULL, SNESComputeJacobianDefault, NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

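  /*
     dt = h^2/2 is the stability limit for explicit (forward) Euler applied
     to u_t = u_xx; implicit methods selected at runtime may safely take
     larger steps.
  */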
  dt = appctx.h * appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetSolution(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info (max steps and max time).
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
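  /*
     TS_EXACTFINALTIME_STEPOVER lets the final step pass the requested end
     time rather than shortening the step to land on it exactly.
  */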
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Total timesteps %" PetscInt_FMT ", Final time %g\n", steps, (double)ftime));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Avg. error (2 norm) = %g Avg. error (max norm) = %g\n", (double)(appctx.norm_2 / steps), (double)(appctx.norm_max / steps)));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&u));
  PetscCall(PetscViewerDestroy(&appctx.viewer1));
  PetscCall(PetscViewerDestroy(&appctx.viewer2));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(DMDestroy(&appctx.da));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
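  /*
     Here i is a global grid index running over this processor's owned
     range, while the array from VecGetArray() is indexed locally from 0,
     hence the shift by mybase.
  */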
  for (i = mybase; i < myend; i++) u_localptr[i - mybase] = PetscSinScalar(PETSC_PI * i * 6. * h) + 3. * PetscSinScalar(PETSC_PI * i * 2. * h);

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, ex1, ex2, sc1, sc2;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = PetscExpReal(-36. * PETSC_PI * PETSC_PI * t);
  ex2 = PetscExpReal(-4. * PETSC_PI * PETSC_PI * t);
  sc1 = PETSC_PI * 6. * h;
  sc2 = PETSC_PI * 2. * h;
  for (i = mybase; i < myend; i++) s_localptr[i - mybase] = PetscSinScalar(sc1 * (PetscReal)i) * ex1 + 3. * PetscSinScalar(sc2 * (PetscReal)i) * ex2;

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
             initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal norm_2, norm_max;

  PetscFunctionBeginUser;
  /*
     View a graph of the current iterate
  */
  PetscCall(VecView(u, appctx->viewer2));

  /*
     Compute the exact solution
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &norm_2));
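  /*
     Scale the discrete 2-norm by sqrt(h) so it approximates the continuous
     L2 norm:  (h * sum_i e_i^2)^(1/2) ~ (integral of e(x)^2 dx)^(1/2).
  */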
  norm_2 = PetscSqrtReal(appctx->h) * norm_2;
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &norm_max));
  if (norm_2 < 1e-14) norm_2 = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)norm_2, (double)norm_max));
  appctx->norm_2 += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  PetscCall(VecView(appctx->solution, appctx->viewer1));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context
   t - current time
   X - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts, PetscReal t, Vec X, Mat AA, Mat BB, void *ctx)
{
  Mat         A      = AA;            /* Jacobian matrix */
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, mstart, mend, idx[3];
  PetscScalar v[3], stwo = -2. / (appctx->h * appctx->h), sone = -.5 * stwo;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatGetOwnershipRange(A, &mstart, &mend));

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) { /* first processor only */
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

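  /*
     These identity rows give du/dt = u at the two boundary points; since
     the initial data vanish there, the boundary values remain zero for all
     time, which enforces the Dirichlet conditions u(t,0) = u(t,1) = 0.
  */
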
  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
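  /*
     Each interior row holds the standard three-point second-difference
     stencil scaled by 1/h^2: sone = 1/h^2 off the diagonal and
     stwo = -2/h^2 on the diagonal.
  */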
  v[0] = sone;
  v[1] = stwo;
  v[2] = sone;
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(0);
}

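/*
   RHSFunctionHeat - Right-hand-side function used for the -nonlinear runs.
   It recomputes the RHS matrix at the current time and applies it, i.e.
   F(t,u) = A(t) u, so the nonlinear path solves the same linear problem
   through the general TS/SNES interface.
*/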
PetscErrorCode RHSFunctionHeat(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  Mat A;

  PetscFunctionBeginUser;
  PetscCall(TSGetRHSJacobian(ts, &A, NULL, NULL, &ctx));
  PetscCall(RHSMatrixHeat(ts, t, globalin, A, NULL, ctx));
  /* PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
  PetscCall(MatMult(A, globalin, globalout));
  PetscFunctionReturn(0);
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/