xref: /petsc/src/ts/tutorials/ex21.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 
/* Usage text printed by -help; now documents every option main() actually parses,
   including -mymonitor, which was previously parsed but undocumented. */
static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug     : Activate debugging printouts\n\
  -nox       : Deactivate x-window graphics\n\
  -mymonitor : Activate the user-defined monitoring routine\n\
  -ul <v>    : lower bound on the interior grid points\n\
  -uh <v>    : upper bound on the interior grid points\n\n";
9 
10 /* ------------------------------------------------------------------------
11 
12    This is a variation of ex2.c to solve the PDE
13 
14                u * u_xx
15          u_t = ---------
16                2*(t+1)^2
17 
18     with box constraints on the interior grid points
19     ul <= u(t,x) <= uh with x != 0,1
20     on the domain 0 <= x <= 1, with boundary conditions
21          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
22     and initial condition
23          u(0,x) = 1 + x*x.
24 
25     The exact solution is:
26          u(t,x) = (1 + x*x) * (1 + t)
27 
28     We use by default the backward Euler method.
29 
30   ------------------------------------------------------------------------- */
31 
32 /*
33    Include "petscts.h" to use the PETSc timestepping routines. Note that
34    this file automatically includes "petscsys.h" and other lower-level
35    PETSc include files.
36 
37    Include the "petscdmda.h" to allow us to use the distributed array data
38    structures to manage the parallel grid.
39 */
40 #include <petscts.h>
41 #include <petscdm.h>
42 #include <petscdmda.h>
43 #include <petscdraw.h>
44 
45 /*
46    User-defined application context - contains data needed by the
47    application-provided callback routines.
48 */
49 typedef struct {
50   MPI_Comm  comm;      /* communicator */
51   DM        da;        /* distributed array data structure */
52   Vec       localwork; /* local ghosted work vector */
53   Vec       u_local;   /* local ghosted approximate solution vector */
54   Vec       solution;  /* global exact solution vector */
55   PetscInt  m;         /* total number of grid points */
56   PetscReal h;         /* mesh width: h = 1/(m-1) */
57   PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
58 } AppCtx;
59 
60 /*
61    User-defined routines, provided below.
62 */
63 extern PetscErrorCode InitialConditions(Vec, AppCtx *);
64 extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
65 extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
66 extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
67 extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
68 extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);
69 
int main(int argc, char **argv) {
  AppCtx      appctx;                /* user-defined application context */
  TS          ts;                    /* timestepping context */
  Mat         A;                     /* Jacobian matrix data structure */
  Vec         u;                     /* approximate solution vector */
  Vec         r;                     /* residual vector */
  PetscInt    time_steps_max = 1000; /* default max timesteps */
  PetscReal   dt;
  PetscReal   time_total_max = 100.0; /* default max total time */
  Vec         xl, xu;                 /* Lower and upper bounds on variables */
  PetscScalar ul = 0.0, uh = 3.0;     /* default box-constraint values; overridable with -ul/-uh */
  PetscBool   mymonitor;
  PetscReal   bounds[] = {1.0, 3.3};  /* y-axis range for the draw viewer only, NOT the VI bounds */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  /* Fix the plot range of the shared draw viewer so successive frames are comparable */
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
  appctx.h = 1.0 / (appctx.m - 1.0); /* uniform mesh width on [0,1] with m points */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
     (1 dof per point, stencil width 1.)
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* Create residual vector */
  PetscCall(VecDuplicate(u, &r));
  /* Create lower and upper bound vectors; interior points get [ul,uh],
     boundary points are left unconstrained by SetBounds() */
  PetscCall(VecDuplicate(u, &xl));
  PetscCall(VecDuplicate(u, &xu));
  PetscCall(SetBounds(xl, xu, ul, uh, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) { PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL)); }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step.
     NOTE(review): this requires a variational-inequality SNES (e.g.
     -snes_type vinewtonrsls, as in the TEST block below) to take effect. */
  PetscCall(TSVISetVariableBounds(ts, xl, xu));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&xl));
  PetscCall(VecDestroy(&xu));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
228 /* --------------------------------------------------------------------- */
229 /*
230    InitialConditions - Computes the solution at the initial time.
231 
232    Input Parameters:
233    u - uninitialized solution vector (global)
234    appctx - user-defined application context
235 
236    Output Parameter:
237    u - vector with solution at initial time (global)
238 */
239 PetscErrorCode InitialConditions(Vec u, AppCtx *appctx) {
240   PetscScalar *u_localptr, h = appctx->h, x;
241   PetscInt     i, mybase, myend;
242 
243   /*
244      Determine starting point of each processor's range of
245      grid values.
246   */
247   PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
248 
249   /*
250     Get a pointer to vector data.
251     - For default PETSc vectors, VecGetArray() returns a pointer to
252       the data array.  Otherwise, the routine is implementation dependent.
253     - You MUST call VecRestoreArray() when you no longer need access to
254       the array.
255     - Note that the Fortran interface to VecGetArray() differs from the
256       C version.  See the users manual for details.
257   */
258   PetscCall(VecGetArray(u, &u_localptr));
259 
260   /*
261      We initialize the solution array by simply writing the solution
262      directly into the array locations.  Alternatively, we could use
263      VecSetValues() or VecSetValuesLocal().
264   */
265   for (i = mybase; i < myend; i++) {
266     x                      = h * (PetscReal)i; /* current location in global grid */
267     u_localptr[i - mybase] = 1.0 + x * x;
268   }
269 
270   /*
271      Restore vector
272   */
273   PetscCall(VecRestoreArray(u, &u_localptr));
274 
275   /*
276      Print debugging information if desired
277   */
278   if (appctx->debug) {
279     PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
280     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
281   }
282 
283   return 0;
284 }
285 
286 /* --------------------------------------------------------------------- */
287 /*
288   SetBounds - Sets the lower and uper bounds on the interior points
289 
290   Input parameters:
291   xl - vector of lower bounds
292   xu - vector of upper bounds
293   ul - constant lower bound for all variables
294   uh - constant upper bound for all variables
295   appctx - Application context
296  */
297 PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx) {
298   PetscScalar *l, *u;
299   PetscMPIInt  rank, size;
300   PetscInt     localsize;
301 
302   PetscFunctionBeginUser;
303   PetscCall(VecSet(xl, ul));
304   PetscCall(VecSet(xu, uh));
305   PetscCall(VecGetLocalSize(xl, &localsize));
306   PetscCall(VecGetArray(xl, &l));
307   PetscCall(VecGetArray(xu, &u));
308 
309   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
310   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
311   if (rank == 0) {
312     l[0] = -PETSC_INFINITY;
313     u[0] = PETSC_INFINITY;
314   }
315   if (rank == size - 1) {
316     l[localsize - 1] = -PETSC_INFINITY;
317     u[localsize - 1] = PETSC_INFINITY;
318   }
319   PetscCall(VecRestoreArray(xl, &l));
320   PetscCall(VecRestoreArray(xu, &u));
321   PetscFunctionReturn(0);
322 }
323 
324 /* --------------------------------------------------------------------- */
325 /*
326    ExactSolution - Computes the exact solution at a given time.
327 
328    Input Parameters:
329    t - current time
330    solution - vector in which exact solution will be computed
331    appctx - user-defined application context
332 
333    Output Parameter:
334    solution - vector with the newly computed exact solution
335 */
336 PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx) {
337   PetscScalar *s_localptr, h = appctx->h, x;
338   PetscInt     i, mybase, myend;
339 
340   /*
341      Determine starting and ending points of each processor's
342      range of grid values
343   */
344   PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
345 
346   /*
347      Get a pointer to vector data.
348   */
349   PetscCall(VecGetArray(solution, &s_localptr));
350 
351   /*
352      Simply write the solution directly into the array locations.
353      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
354   */
355   for (i = mybase; i < myend; i++) {
356     x                      = h * (PetscReal)i;
357     s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
358   }
359 
360   /*
361      Restore vector
362   */
363   PetscCall(VecRestoreArray(solution, &s_localptr));
364   return 0;
365 }
366 /* --------------------------------------------------------------------- */
367 /*
368    Monitor - User-provided routine to monitor the solution computed at
369    each timestep.  This example plots the solution and computes the
370    error in two different norms.
371 
372    Input Parameters:
373    ts     - the timestep context
374    step   - the count of the current step (with 0 meaning the
375             initial condition)
376    time   - the current time
377    u      - the solution at this timestep
378    ctx    - the user-provided context for this monitoring routine.
379             In this case we use the application context which contains
380             information about the problem size, workspace and the exact
381             solution.
382 */
383 PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx) {
384   AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
385   PetscReal en2, en2s, enmax;
386   PetscDraw draw;
387 
388   /*
389      We use the default X windows viewer
390              PETSC_VIEWER_DRAW_(appctx->comm)
391      that is associated with the current communicator. This saves
392      the effort of calling PetscViewerDrawOpen() to create the window.
393      Note that if we wished to plot several items in separate windows we
394      would create each viewer with PetscViewerDrawOpen() and store them in
395      the application context, appctx.
396 
397      PetscReal buffering makes graphics look better.
398   */
399   PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
400   PetscCall(PetscDrawSetDoubleBuffer(draw));
401   PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));
402 
403   /*
404      Compute the exact solution at this timestep
405   */
406   PetscCall(ExactSolution(time, appctx->solution, appctx));
407 
408   /*
409      Print debugging information if desired
410   */
411   if (appctx->debug) {
412     PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
413     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
414     PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
415     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
416   }
417 
418   /*
419      Compute the 2-norm and max-norm of the error
420   */
421   PetscCall(VecAXPY(appctx->solution, -1.0, u));
422   PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
423   en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
424   PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));
425 
426   /*
427      PetscPrintf() causes only the first processor in this
428      communicator to print the timestep information.
429   */
430   PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g,2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));
431 
432   /*
433      Print debugging information if desired
434    */
435   /*  if (appctx->debug) {
436      PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
437      PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
438    } */
439   return 0;
440 }
441 /* --------------------------------------------------------------------- */
442 /*
443    RHSFunction - User-provided routine that evalues the right-hand-side
444    function of the ODE.  This routine is set in the main program by
445    calling TSSetRHSFunction().  We compute:
446           global_out = F(global_in)
447 
448    Input Parameters:
449    ts         - timesteping context
450    t          - current time
451    global_in  - vector containing the current iterate
452    ctx        - (optional) user-provided context for function evaluation.
453                 In this case we use the appctx defined above.
454 
455    Output Parameter:
456    global_out - vector containing the newly evaluated function
457 */
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx) {
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  /* Combined scaling 1/(h^2 * 2*(1+t)^2): the 1/h^2 from the centered
     second-difference and the 1/(2*(t+1)^2) from the PDE itself */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor.
      NOTE: localsize is the size of the LOCAL (ghosted) vector, i.e.
      owned points plus ghost points, not the owned count alone.
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2

     NOTE(review): the right boundary below uses 0 once t >= .5 instead of
     the exact derivative 2 — presumably a deliberate perturbation so the
     solution runs into the box constraints; confirm against the example's
     intent before "fixing".
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0;

  /*
     Handle the interior nodes where the PDE is replace by finite
     difference operators:  F_i = u_i * (u_{i+1} + u_{i-1} - 2 u_i) * sc.
     Indices here are into the ghosted local arrays, so i-1 and i+1 are
     always valid; on interior ranks the endpoints written above land on
     ghost entries, which INSERT_VALUES scattering below ignores
     (NOTE(review): relies on DMLocalToGlobal INSERT_VALUES copying only
     owned entries — confirm against the DM documentation).
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  /*  if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
     PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
   } */

  return 0;
}
542 /* --------------------------------------------------------------------- */
543 /*
544    RHSJacobian - User-provided routine to compute the Jacobian of
545    the nonlinear right-hand-side function of the ODE.
546 
547    Input Parameters:
548    ts - the TS context
549    t - current time
550    global_in - global input vector
551    dummy - optional user-defined context, as set by TSetRHSJacobian()
552 
553    Output Parameters:
554    AA - Jacobian matrix
555    BB - optionally different preconditioning matrix
556    str - flag indicating matrix structure
557 
558   Notes:
559   RHSJacobian computes entries for the locally owned part of the Jacobian.
560    - Currently, all PETSc parallel matrix formats are partitioned by
561      contiguous chunks of rows across the processors.
562    - Each processor needs to insert only elements that it owns
563      locally (but any non-local elements will be sent to the
564      appropriate processor during matrix assembly).
565    - Always specify global row and columns of matrix entries when
566      using MatSetValues().
567    - Here, we set all entries for a particular row at once.
568    - Note that MatSetValues() uses 0-based row and column numbers
569      in Fortran as well as in C.
570 */
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx) {
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data (ghosted local array)
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(B, &mstarts, &mends));
  mstart = mstarts; /* working copies; mutated below for boundary rows */
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: the boundary rates
     u_t(t,0) and u_t(t,1) in RHSFunction do not depend on u, so those
     Jacobian rows are zero.  mstart/mend are shrunk so the interior loop
     below skips these rows.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.  Row i of the Jacobian of
     F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2 u_i) is:
       dF_i/du_{i-1} = sc * u_i
       dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4 u_i)
       dF_i/du_{i+1} = sc * u_i
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    /* Map global row i to its index in the ghosted local array.
       NOTE(review): on rank 0 mstart was advanced past the boundary row
       and there is no left ghost, so is == i; on other ranks the +1
       accounts for the one left ghost point — confirm against the DMDA
       ghost layout (stencil width 1). */
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  return 0;
}
670 
671 /*TEST
672 
673     test:
674       args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
675       requires: !single
676 
677 TEST*/
678