static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)
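
    One can check this directly: u_t = 1 + x*x and u_xx = 2*(1 + t), so
         u * u_xx / (2*(t+1)^2) = (1 + x*x) * (1 + t) * 2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t,
    and the boundary and initial conditions above are satisfied as well.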

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
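
/*
   Example usage (illustrative only; the executable name and option values
   below are not part of this source file):

       mpiexec -n 2 ./ex2 -M 100 -ts_max_steps 10 -ts_monitor

   where -M sets the number of grid points (see the help string above) and
   -ts_max_steps, -ts_monitor are standard PETSc TS runtime options.
*/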

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx    appctx;               /* user-defined application context */
  TS        ts;                   /* timestepping context */
  Mat       A;                    /* Jacobian matrix data structure */
  Vec       u;                    /* approximate solution vector */
  PetscInt  time_steps_max = 100; /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));

  appctx.h = 1.0 / (appctx.m - 1.0);
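  /* e.g., with the default m = 60 this gives h = 1/59; the initial timestep chosen below is dt = h/2 */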

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
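
  /*
     Because backward Euler is implicit, each step requires the solution of a
     nonlinear system, handled by the TS's built-in SNES; runtime options such
     as -snes_monitor and -ksp_monitor (used in the tests below) apply to that
     inner solve.
  */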

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;
  PetscDraw draw;

  PetscFunctionBeginUser;
  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale by sqrt(h) so the discrete 2-norm approximates the continuous L2 norm */
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
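  /*
     For each owned interior node i this evaluates the centered-difference
     approximation of the right-hand side u*u_xx / (2*(t+1)^2), i.e.
        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     using the factor sc computed above.
  */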
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
    PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
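  /*
     At the boundaries the RHS function is the constant 1 or 2 (see
     RHSFunction above), which does not depend on u, so these Jacobian
     rows get only an explicit zero on the diagonal.
  */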
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
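  /*
     For an interior row i the RHS function is
        F_i = sc * u_i * (u_{i+1} - 2*u_i + u_{i-1}),  with sc = 1/(2*h^2*(1+t)^2),
     so the three nonzero entries of that Jacobian row are
        dF_i/du_{i-1} = sc * u_i
        dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc * u_i
     which are the values v[0], v[1], and v[2] set below.
  */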
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
      # GEMV sensitive to single
      args: -vec_mdot_use_gemv 0 -vec_maxpy_use_gemv 0

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/