static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>     : number of grid points\n\
  -debug      : Activate debugging printouts\n\
  -nox        : Deactivate x-window graphics\n\
  -ul <ul>    : lower bound on the solution\n\
  -uh <uh>    : upper bound on the solution\n\
  -mymonitor  : Activate the user-defined monitoring routine\n\n";

/* ------------------------------------------------------------------------

   This is a variation of ex2.c that solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    with box constraints on the interior grid points
    ul <= u(t,x) <= uh for x != 0,1
    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is
         u(t,x) = (1 + x*x) * (1 + t).

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
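
/*
   A quick check that the stated exact solution satisfies the PDE: for
   u = (1 + x*x)*(1 + t) we have u_t = 1 + x*x and u_xx = 2*(1 + t), so
       u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t.

   On the interior grid points, RHSFunction() below replaces u_xx by the
   standard second-order centered difference, giving the semi-discrete system
       (u_i)_t = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(t+1)^2),  i = 1,...,m-2.
*/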

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (PETSC_TRUE indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx      appctx;                /* user-defined application context */
  TS          ts;                    /* timestepping context */
  Mat         A;                     /* Jacobian matrix data structure */
  Vec         u;                     /* approximate solution vector */
  Vec         r;                     /* residual vector */
  PetscInt    time_steps_max = 1000; /* default max timesteps */
  PetscReal   dt;
  PetscReal   time_total_max = 100.0; /* default max total time */
  Vec         xl, xu;                 /* Lower and upper bounds on variables */
  PetscScalar ul = 0.0, uh = 3.0;
  PetscBool   mymonitor;
  PetscReal   bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* Create residual vector */
  PetscCall(VecDuplicate(u, &r));
  /* Create lower and upper bound vectors */
  PetscCall(VecDuplicate(u, &xl));
  PetscCall(VecDuplicate(u, &xu));
  PetscCall(SetBounds(xl, xu, ul, uh, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step */
  PetscCall(TSVISetVariableBounds(ts, xl, xu));
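  /*
     Note: enforcing the bounds requires a nonlinear solver capable of
     handling variational inequalities, e.g. -snes_type vinewtonrsls
     (see the test at the bottom of this file).
  */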
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&xl));
  PetscCall(VecDestroy(&xu));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
  SetBounds - Sets the lower and upper bounds on the interior points

  Input Parameters:
  ul - constant lower bound for all interior variables
  uh - constant upper bound for all interior variables
  appctx - user-defined application context

  Output Parameters:
  xl - vector of lower bounds
  xu - vector of upper bounds
 */
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx)
{
  PetscScalar *l, *u;
  PetscMPIInt  rank, size;
  PetscInt     localsize;

  PetscFunctionBeginUser;
  PetscCall(VecSet(xl, ul));
  PetscCall(VecSet(xu, uh));
  PetscCall(VecGetLocalSize(xl, &localsize));
  PetscCall(VecGetArray(xl, &l));
  PetscCall(VecGetArray(xu, &u));

  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
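  /*
     The boundary points are determined by the Dirichlet boundary
     conditions, so they are left unconstrained: their bounds are set to
     -infinity and +infinity on the first and last process, which own the
     first and last grid points.
  */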
  if (rank == 0) {
    l[0] = -PETSC_INFINITY;
    u[0] = PETSC_INFINITY;
  }
  if (rank == size - 1) {
    l[localsize - 1] = -PETSC_INFINITY;
    u[localsize - 1] = PETSC_INFINITY;
  }
  PetscCall(VecRestoreArray(xl, &l));
  PetscCall(VecRestoreArray(xu, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;
  PetscDraw draw;

  PetscFunctionBeginUser;
  /*
     We use the default X windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
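  /*
     Scaling the discrete 2-norm by sqrt(h) makes it approximate the
     continuous L2 norm of the error, so the value is comparable across
     grid resolutions.
  */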
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by sqrt(h) */
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g, 2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /*
     Print debugging information if desired
  */
  /*  if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
   } */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

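  /*
     sc collects the constant factors of the finite-difference formula:
     1/h^2 from the centered second difference and 1/(2*(1+t)^2) from the
     denominator of the PDE.
  */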
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
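  /*
     Note that the right-boundary rate is set to zero once t reaches 0.5:
     with the default upper bound uh = 3.0, the boundary value
     u(t,1) = 2*t + 2 reaches the bound at t = 0.5, so its growth is
     stopped there to keep the boundary data consistent with the
     constraint.
  */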
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  /*  if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
     PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
   } */

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B - optionally different matrix used to construct the preconditioner

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(B, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
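  /*
     For the interior function F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
     differentiating with respect to each unknown gives the three nonzero
     Jacobian entries per row:
        dF_i/du_{i-1} = sc * u_i
        dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc * u_i
  */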
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}

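/*
   Example invocation (a minimal sketch; the executable name ex21 follows
   from this file name and may differ in your build):
       mpiexec -n 2 ./ex21 -ts_type beuler -ts_max_steps 10 -mymonitor -snes_type vinewtonrsls -nox
*/
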
/*TEST

    test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/