xref: /petsc/src/ts/tutorials/ex21.c (revision d71ae5a4db6382e7f06317b8d368875286fe9008)
1 
2 static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
3 timestepping.  Runtime options include:\n\
4   -M <xg>, where <xg> = number of grid points\n\
5   -debug : Activate debugging printouts\n\
6   -nox   : Deactivate x-window graphics\n\
7   -ul   : lower bound\n\
8   -uh  : upper bound\n\n";
9 
10 /* ------------------------------------------------------------------------
11 
12    This is a variation of ex2.c to solve the PDE
13 
14                u * u_xx
15          u_t = ---------
16                2*(t+1)^2
17 
18     with box constraints on the interior grid points
19     ul <= u(t,x) <= uh with x != 0,1
20     on the domain 0 <= x <= 1, with boundary conditions
21          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
22     and initial condition
23          u(0,x) = 1 + x*x.
24 
25     The exact solution is:
26          u(t,x) = (1 + x*x) * (1 + t)
27 
28     We use by default the backward Euler method.
29 
30   ------------------------------------------------------------------------- */
31 
32 /*
33    Include "petscts.h" to use the PETSc timestepping routines. Note that
34    this file automatically includes "petscsys.h" and other lower-level
35    PETSc include files.
36 
37    Include the "petscdmda.h" to allow us to use the distributed array data
38    structures to manage the parallel grid.
39 */
40 #include <petscts.h>
41 #include <petscdm.h>
42 #include <petscdmda.h>
43 #include <petscdraw.h>
44 
45 /*
46    User-defined application context - contains data needed by the
47    application-provided callback routines.
48 */
typedef struct {
  MPI_Comm  comm;      /* communicator shared by all callbacks */
  DM        da;        /* distributed array managing the 1-D parallel grid */
  Vec       localwork; /* local ghosted work vector (RHS evaluation scratch) */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector (also reused as error vector in Monitor) */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (PETSC_TRUE activates debugging printouts) */
} AppCtx;
59 
60 /*
61    User-defined routines, provided below.
62 */
63 extern PetscErrorCode InitialConditions(Vec, AppCtx *);
64 extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
65 extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
66 extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
67 extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
68 extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);
69 
int main(int argc, char **argv)
{
  AppCtx      appctx;                /* user-defined application context */
  TS          ts;                    /* timestepping context */
  Mat         A;                     /* Jacobian matrix data structure */
  Vec         u;                     /* approximate solution vector */
  Vec         r;                     /* residual vector */
  PetscInt    time_steps_max = 1000; /* default max timesteps */
  PetscReal   dt;
  PetscReal   time_total_max = 100.0; /* default max total time */
  Vec         xl, xu;                 /* Lower and upper bounds on variables */
  PetscScalar ul = 0.0, uh = 3.0;     /* default box-constraint values; overridable via -ul/-uh */
  PetscBool   mymonitor;              /* set by PetscOptionsHasName below */
  PetscReal   bounds[] = {1.0, 3.3};  /* y-axis range for the X-window draw viewer */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
  appctx.h = 1.0 / (appctx.m - 1.0); /* uniform mesh width on [0,1] */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* Create residual vector */
  PetscCall(VecDuplicate(u, &r));
  /* Create lower and upper bound vectors */
  PetscCall(VecDuplicate(u, &xl));
  PetscCall(VecDuplicate(u, &xu));
  PetscCall(SetBounds(xl, xu, ul, uh, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step;
     must precede TSSetFromOptions() so command-line options can act on a
     fully configured solver */
  PetscCall(TSVISetVariableBounds(ts, xl, xu));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&xl));
  PetscCall(VecDestroy(&xu));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
229 /* --------------------------------------------------------------------- */
230 /*
231    InitialConditions - Computes the solution at the initial time.
232 
233    Input Parameters:
234    u - uninitialized solution vector (global)
235    appctx - user-defined application context
236 
237    Output Parameter:
238    u - vector with solution at initial time (global)
239 */
240 PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
241 {
242   PetscScalar *u_localptr, h = appctx->h, x;
243   PetscInt     i, mybase, myend;
244 
245   /*
246      Determine starting point of each processor's range of
247      grid values.
248   */
249   PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
250 
251   /*
252     Get a pointer to vector data.
253     - For default PETSc vectors, VecGetArray() returns a pointer to
254       the data array.  Otherwise, the routine is implementation dependent.
255     - You MUST call VecRestoreArray() when you no longer need access to
256       the array.
257     - Note that the Fortran interface to VecGetArray() differs from the
258       C version.  See the users manual for details.
259   */
260   PetscCall(VecGetArray(u, &u_localptr));
261 
262   /*
263      We initialize the solution array by simply writing the solution
264      directly into the array locations.  Alternatively, we could use
265      VecSetValues() or VecSetValuesLocal().
266   */
267   for (i = mybase; i < myend; i++) {
268     x                      = h * (PetscReal)i; /* current location in global grid */
269     u_localptr[i - mybase] = 1.0 + x * x;
270   }
271 
272   /*
273      Restore vector
274   */
275   PetscCall(VecRestoreArray(u, &u_localptr));
276 
277   /*
278      Print debugging information if desired
279   */
280   if (appctx->debug) {
281     PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
282     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
283   }
284 
285   return 0;
286 }
287 
288 /* --------------------------------------------------------------------- */
289 /*
290   SetBounds - Sets the lower and uper bounds on the interior points
291 
292   Input parameters:
293   xl - vector of lower bounds
294   xu - vector of upper bounds
295   ul - constant lower bound for all variables
296   uh - constant upper bound for all variables
297   appctx - Application context
298  */
299 PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx)
300 {
301   PetscScalar *l, *u;
302   PetscMPIInt  rank, size;
303   PetscInt     localsize;
304 
305   PetscFunctionBeginUser;
306   PetscCall(VecSet(xl, ul));
307   PetscCall(VecSet(xu, uh));
308   PetscCall(VecGetLocalSize(xl, &localsize));
309   PetscCall(VecGetArray(xl, &l));
310   PetscCall(VecGetArray(xu, &u));
311 
312   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
313   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
314   if (rank == 0) {
315     l[0] = -PETSC_INFINITY;
316     u[0] = PETSC_INFINITY;
317   }
318   if (rank == size - 1) {
319     l[localsize - 1] = -PETSC_INFINITY;
320     u[localsize - 1] = PETSC_INFINITY;
321   }
322   PetscCall(VecRestoreArray(xl, &l));
323   PetscCall(VecRestoreArray(xu, &u));
324   PetscFunctionReturn(0);
325 }
326 
327 /* --------------------------------------------------------------------- */
328 /*
329    ExactSolution - Computes the exact solution at a given time.
330 
331    Input Parameters:
332    t - current time
333    solution - vector in which exact solution will be computed
334    appctx - user-defined application context
335 
336    Output Parameter:
337    solution - vector with the newly computed exact solution
338 */
339 PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
340 {
341   PetscScalar *s_localptr, h = appctx->h, x;
342   PetscInt     i, mybase, myend;
343 
344   /*
345      Determine starting and ending points of each processor's
346      range of grid values
347   */
348   PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
349 
350   /*
351      Get a pointer to vector data.
352   */
353   PetscCall(VecGetArray(solution, &s_localptr));
354 
355   /*
356      Simply write the solution directly into the array locations.
357      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
358   */
359   for (i = mybase; i < myend; i++) {
360     x                      = h * (PetscReal)i;
361     s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
362   }
363 
364   /*
365      Restore vector
366   */
367   PetscCall(VecRestoreArray(solution, &s_localptr));
368   return 0;
369 }
370 /* --------------------------------------------------------------------- */
371 /*
372    Monitor - User-provided routine to monitor the solution computed at
373    each timestep.  This example plots the solution and computes the
374    error in two different norms.
375 
376    Input Parameters:
377    ts     - the timestep context
378    step   - the count of the current step (with 0 meaning the
379             initial condition)
380    time   - the current time
381    u      - the solution at this timestep
382    ctx    - the user-provided context for this monitoring routine.
383             In this case we use the application context which contains
384             information about the problem size, workspace and the exact
385             solution.
386 */
387 PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
388 {
389   AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
390   PetscReal en2, en2s, enmax;
391   PetscDraw draw;
392 
393   /*
394      We use the default X windows viewer
395              PETSC_VIEWER_DRAW_(appctx->comm)
396      that is associated with the current communicator. This saves
397      the effort of calling PetscViewerDrawOpen() to create the window.
398      Note that if we wished to plot several items in separate windows we
399      would create each viewer with PetscViewerDrawOpen() and store them in
400      the application context, appctx.
401 
402      PetscReal buffering makes graphics look better.
403   */
404   PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
405   PetscCall(PetscDrawSetDoubleBuffer(draw));
406   PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));
407 
408   /*
409      Compute the exact solution at this timestep
410   */
411   PetscCall(ExactSolution(time, appctx->solution, appctx));
412 
413   /*
414      Print debugging information if desired
415   */
416   if (appctx->debug) {
417     PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
418     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
419     PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
420     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
421   }
422 
423   /*
424      Compute the 2-norm and max-norm of the error
425   */
426   PetscCall(VecAXPY(appctx->solution, -1.0, u));
427   PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
428   en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
429   PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));
430 
431   /*
432      PetscPrintf() causes only the first processor in this
433      communicator to print the timestep information.
434   */
435   PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g,2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));
436 
437   /*
438      Print debugging information if desired
439    */
440   /*  if (appctx->debug) {
441      PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
442      PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
443    } */
444   return 0;
445 }
446 /* --------------------------------------------------------------------- */
447 /*
448    RHSFunction - User-provided routine that evalues the right-hand-side
449    function of the ODE.  This routine is set in the main program by
450    calling TSSetRHSFunction().  We compute:
451           global_out = F(global_in)
452 
453    Input Parameters:
454    ts         - timesteping context
455    t          - current time
456    global_in  - vector containing the current iterate
457    ctx        - (optional) user-provided context for function evaluation.
458                 In this case we use the appctx defined above.
459 
460    Output Parameter:
461    global_out - vector containing the newly evaluated function
462 */
463 PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
464 {
465   AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
466   DM                 da        = appctx->da;        /* distributed array */
467   Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
468   Vec                localwork = appctx->localwork; /* local ghosted work vector */
469   PetscInt           i, localsize;
470   PetscMPIInt        rank, size;
471   PetscScalar       *copyptr, sc;
472   const PetscScalar *localptr;
473 
474   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
475      Get ready for local function computations
476      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
477   /*
478      Scatter ghost points to local vector, using the 2-step process
479         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
480      By placing code between these two statements, computations can be
481      done while messages are in transition.
482   */
483   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
484   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
485 
486   /*
487       Access directly the values in our local INPUT work array
488   */
489   PetscCall(VecGetArrayRead(local_in, &localptr));
490 
491   /*
492       Access directly the values in our local OUTPUT work array
493   */
494   PetscCall(VecGetArray(localwork, &copyptr));
495 
496   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
497 
498   /*
499       Evaluate our function on the nodes owned by this processor
500   */
501   PetscCall(VecGetLocalSize(local_in, &localsize));
502 
503   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
504      Compute entries for the locally owned part
505      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
506 
507   /*
508      Handle boundary conditions: This is done by using the boundary condition
509         u(t,boundary) = g(t,boundary)
510      for some function g. Now take the derivative with respect to t to obtain
511         u_{t}(t,boundary) = g_{t}(t,boundary)
512 
513      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
514              and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
515   */
516   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
517   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
518   if (rank == 0) copyptr[0] = 1.0;
519   if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0;
520 
521   /*
522      Handle the interior nodes where the PDE is replace by finite
523      difference operators.
524   */
525   for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
526 
527   /*
528      Restore vectors
529   */
530   PetscCall(VecRestoreArrayRead(local_in, &localptr));
531   PetscCall(VecRestoreArray(localwork, &copyptr));
532 
533   /*
534      Insert values from the local OUTPUT vector into the global
535      output vector
536   */
537   PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
538   PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
539 
540   /* Print debugging information if desired */
541   /*  if (appctx->debug) {
542      PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
543      PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
544    } */
545 
546   return 0;
547 }
548 /* --------------------------------------------------------------------- */
549 /*
550    RHSJacobian - User-provided routine to compute the Jacobian of
551    the nonlinear right-hand-side function of the ODE.
552 
553    Input Parameters:
554    ts - the TS context
555    t - current time
556    global_in - global input vector
557    dummy - optional user-defined context, as set by TSetRHSJacobian()
558 
559    Output Parameters:
560    AA - Jacobian matrix
561    BB - optionally different preconditioning matrix
562    str - flag indicating matrix structure
563 
564   Notes:
565   RHSJacobian computes entries for the locally owned part of the Jacobian.
566    - Currently, all PETSc parallel matrix formats are partitioned by
567      contiguous chunks of rows across the processors.
568    - Each processor needs to insert only elements that it owns
569      locally (but any non-local elements will be sent to the
570      appropriate processor during matrix assembly).
571    - Always specify global row and columns of matrix entries when
572      using MatSetValues().
573    - Here, we set all entries for a particular row at once.
574    - Note that MatSetValues() uses 0-based row and column numbers
575      in Fortran as well as in C.
576 */
577 PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx)
578 {
579   AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
580   Vec                local_in = appctx->u_local; /* local ghosted input vector */
581   DM                 da       = appctx->da;      /* distributed array */
582   PetscScalar        v[3], sc;
583   const PetscScalar *localptr;
584   PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;
585 
586   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
587      Get ready for local Jacobian computations
588      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
589   /*
590      Scatter ghost points to local vector, using the 2-step process
591         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
592      By placing code between these two statements, computations can be
593      done while messages are in transition.
594   */
595   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
596   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
597 
598   /*
599      Get pointer to vector data
600   */
601   PetscCall(VecGetArrayRead(local_in, &localptr));
602 
603   /*
604      Get starting and ending locally owned rows of the matrix
605   */
606   PetscCall(MatGetOwnershipRange(B, &mstarts, &mends));
607   mstart = mstarts;
608   mend   = mends;
609 
610   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
611      Compute entries for the locally owned part of the Jacobian.
612       - Currently, all PETSc parallel matrix formats are partitioned by
613         contiguous chunks of rows across the processors.
614       - Each processor needs to insert only elements that it owns
615         locally (but any non-local elements will be sent to the
616         appropriate processor during matrix assembly).
617       - Here, we set all entries for a particular row at once.
618       - We can set matrix entries either using either
619         MatSetValuesLocal() or MatSetValues().
620      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
621 
622   /*
623      Set matrix rows corresponding to boundary data
624   */
625   if (mstart == 0) {
626     v[0] = 0.0;
627     PetscCall(MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
628     mstart++;
629   }
630   if (mend == appctx->m) {
631     mend--;
632     v[0] = 0.0;
633     PetscCall(MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES));
634   }
635 
636   /*
637      Set matrix rows corresponding to interior data.  We construct the
638      matrix one row at a time.
639   */
640   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
641   for (i = mstart; i < mend; i++) {
642     idx[0] = i - 1;
643     idx[1] = i;
644     idx[2] = i + 1;
645     is     = i - mstart + 1;
646     v[0]   = sc * localptr[is];
647     v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
648     v[2]   = sc * localptr[is];
649     PetscCall(MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES));
650   }
651 
652   /*
653      Restore vector
654   */
655   PetscCall(VecRestoreArrayRead(local_in, &localptr));
656 
657   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
658      Complete the matrix assembly process and set some options
659      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
660   /*
661      Assemble matrix, using the 2-step process:
662        MatAssemblyBegin(), MatAssemblyEnd()
663      Computations can be done while messages are in transition
664      by placing code between these two statements.
665   */
666   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
667   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
668 
669   /*
670      Set and option to indicate that we will never add a new nonzero location
671      to the matrix. If we do, it will generate an error.
672   */
673   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
674 
675   return 0;
676 }
677 
678 /*TEST
679 
680     test:
681       args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
682       requires: !single
683 
684 TEST*/
685