xref: /petsc/src/ts/tutorials/ex21.c (revision 5f80ce2ab25dff0f4601e710601cbbcecf323266)
1 
2 static char help[] ="Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
3 timestepping.  Runtime options include:\n\
4   -M <xg>, where <xg> = number of grid points\n\
5   -debug : Activate debugging printouts\n\
6   -nox   : Deactivate x-window graphics\n\
7   -ul   : lower bound\n\
8   -uh  : upper bound\n\n";
9 
10 /*
11    Concepts: TS^time-dependent nonlinear problems
12    Concepts: TS^Variational inequality nonlinear solver
13    Processors: n
14 */
15 
16 /* ------------------------------------------------------------------------
17 
18    This is a variation of ex2.c to solve the PDE
19 
20                u * u_xx
21          u_t = ---------
22                2*(t+1)^2
23 
24     with box constraints on the interior grid points
25     ul <= u(t,x) <= uh with x != 0,1
26     on the domain 0 <= x <= 1, with boundary conditions
27          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
28     and initial condition
29          u(0,x) = 1 + x*x.
30 
31     The exact solution is:
32          u(t,x) = (1 + x*x) * (1 + t)
33 
34     We use by default the backward Euler method.
35 
36   ------------------------------------------------------------------------- */
37 
38 /*
39    Include "petscts.h" to use the PETSc timestepping routines. Note that
40    this file automatically includes "petscsys.h" and other lower-level
41    PETSc include files.
42 
43    Include the "petscdmda.h" to allow us to use the distributed array data
44    structures to manage the parallel grid.
45 */
46 #include <petscts.h>
47 #include <petscdm.h>
48 #include <petscdmda.h>
49 #include <petscdraw.h>
50 
51 /*
52    User-defined application context - contains data needed by the
53    application-provided callback routines.
54 */
/*
   User-defined application context - contains data needed by the
   application-provided callback routines (RHSFunction, RHSJacobian,
   Monitor, SetBounds). One instance is shared by all callbacks via the
   user-context pointer registered with TS.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator shared by all PETSc objects in this example */
  DM        da;             /* distributed array managing the 1d parallel grid */
  Vec       localwork;      /* local ghosted work vector (output scratch for RHSFunction) */
  Vec       u_local;        /* local ghosted approximate solution vector (input scratch) */
  Vec       solution;       /* global exact solution vector, used by Monitor for error norms */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* flag (PETSC_TRUE activates debugging printouts) */
} AppCtx;
65 
66 /*
67    User-defined routines, provided below.
68 */
69 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
70 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
71 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
72 extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
73 extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
74 extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);
75 
int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  Vec            r;                      /* residual vector */
  PetscInt       time_steps_max = 1000;  /* default max timesteps */
  PetscErrorCode ierr;
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  Vec            xl,xu; /* Lower and upper bounds on variables */
  PetscScalar    ul=0.0,uh = 3.0;        /* default lower/upper bound values, overridable with -ul/-uh */
  PetscBool      mymonitor;
  PetscReal      bounds[] = {1.0, 3.3};  /* y-axis limits for the draw viewer used by Monitor */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  /* Fix the plotting range of the shared draw viewer before any VecView */
  CHKERRQ(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  CHKERRQ(PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL));
  CHKERRQ(PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));
  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  CHKERRQ(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  CHKERRQ(DMSetFromOptions(appctx.da));
  CHKERRQ(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  CHKERRQ(DMCreateGlobalVector(appctx.da,&u));
  CHKERRQ(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  CHKERRQ(VecDuplicate(appctx.u_local,&appctx.localwork));
  CHKERRQ(VecDuplicate(u,&appctx.solution));

  /* Create residual vector */
  CHKERRQ(VecDuplicate(u,&r));
  /* Create lower and upper bound vectors */
  CHKERRQ(VecDuplicate(u,&xl));
  CHKERRQ(VecDuplicate(u,&xu));
  CHKERRQ(SetBounds(xl,xu,ul,uh,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSCreate(PETSC_COMM_WORLD,&ts));
  CHKERRQ(TSSetProblemType(ts,TS_NONLINEAR));
  CHKERRQ(TSSetRHSFunction(ts,r,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    CHKERRQ(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  CHKERRQ(MatSetFromOptions(A));
  CHKERRQ(MatSetUp(A));
  CHKERRQ(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  CHKERRQ(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSSetType(ts,TSBEULER));
  CHKERRQ(TSSetMaxSteps(ts,time_steps_max));
  CHKERRQ(TSSetMaxTime(ts,time_total_max));
  CHKERRQ(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step;
     this is what turns each implicit solve into a variational inequality */
  CHKERRQ(TSVISetVariableBounds(ts,xl,xu));
  CHKERRQ(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  CHKERRQ(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  CHKERRQ(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(VecDestroy(&r));
  CHKERRQ(VecDestroy(&xl));
  CHKERRQ(VecDestroy(&xu));
  CHKERRQ(TSDestroy(&ts));
  CHKERRQ(VecDestroy(&u));
  CHKERRQ(MatDestroy(&A));
  CHKERRQ(DMDestroy(&appctx.da));
  CHKERRQ(VecDestroy(&appctx.localwork));
  CHKERRQ(VecDestroy(&appctx.solution));
  CHKERRQ(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
237 /* --------------------------------------------------------------------- */
238 /*
239    InitialConditions - Computes the solution at the initial time.
240 
241    Input Parameters:
242    u - uninitialized solution vector (global)
243    appctx - user-defined application context
244 
245    Output Parameter:
246    u - vector with solution at initial time (global)
247 */
248 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
249 {
250   PetscScalar    *u_localptr,h = appctx->h,x;
251   PetscInt       i,mybase,myend;
252 
253   /*
254      Determine starting point of each processor's range of
255      grid values.
256   */
257   CHKERRQ(VecGetOwnershipRange(u,&mybase,&myend));
258 
259   /*
260     Get a pointer to vector data.
261     - For default PETSc vectors, VecGetArray() returns a pointer to
262       the data array.  Otherwise, the routine is implementation dependent.
263     - You MUST call VecRestoreArray() when you no longer need access to
264       the array.
265     - Note that the Fortran interface to VecGetArray() differs from the
266       C version.  See the users manual for details.
267   */
268   CHKERRQ(VecGetArray(u,&u_localptr));
269 
270   /*
271      We initialize the solution array by simply writing the solution
272      directly into the array locations.  Alternatively, we could use
273      VecSetValues() or VecSetValuesLocal().
274   */
275   for (i=mybase; i<myend; i++) {
276     x = h*(PetscReal)i; /* current location in global grid */
277     u_localptr[i-mybase] = 1.0 + x*x;
278   }
279 
280   /*
281      Restore vector
282   */
283   CHKERRQ(VecRestoreArray(u,&u_localptr));
284 
285   /*
286      Print debugging information if desired
287   */
288   if (appctx->debug) {
289      CHKERRQ(PetscPrintf(appctx->comm,"initial guess vector\n"));
290      CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
291   }
292 
293   return 0;
294 }
295 
296 /* --------------------------------------------------------------------- */
297 /*
298   SetBounds - Sets the lower and upper bounds on the interior points
299 
300   Input parameters:
301   xl - vector of lower bounds
302   xu - vector of upper bounds
303   ul - constant lower bound for all variables
304   uh - constant upper bound for all variables
305   appctx - Application context
306  */
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx)
{
  PetscScalar       *l,*u;
  PetscMPIInt       rank,size;
  PetscInt          localsize;

  PetscFunctionBeginUser;
  /* Constant bounds ul <= u <= uh everywhere ... */
  CHKERRQ(VecSet(xl,ul));
  CHKERRQ(VecSet(xu,uh));
  CHKERRQ(VecGetLocalSize(xl,&localsize));
  CHKERRQ(VecGetArray(xl,&l));
  CHKERRQ(VecGetArray(xu,&u));

  /* ... except at the two physical boundary points (global indices 0 and
     m-1, owned by the first and last rank respectively), which carry
     Dirichlet data and therefore must not be constrained: set their
     bounds to -inf/+inf. Note: on a single process both branches below
     apply to the same rank. */
  CHKERRMPI(MPI_Comm_rank(appctx->comm,&rank));
  CHKERRMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0) {
    l[0] = -PETSC_INFINITY;
    u[0] =  PETSC_INFINITY;
  }
  if (rank == size-1) {
    l[localsize-1] = -PETSC_INFINITY;
    u[localsize-1] = PETSC_INFINITY;
  }
  CHKERRQ(VecRestoreArray(xl,&l));
  CHKERRQ(VecRestoreArray(xu,&u));
  PetscFunctionReturn(0);
}
334 
335 /* --------------------------------------------------------------------- */
336 /*
337    ExactSolution - Computes the exact solution at a given time.
338 
339    Input Parameters:
340    t - current time
341    solution - vector in which exact solution will be computed
342    appctx - user-defined application context
343 
344    Output Parameter:
345    solution - vector with the newly computed exact solution
346 */
347 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
348 {
349   PetscScalar    *s_localptr,h = appctx->h,x;
350   PetscInt       i,mybase,myend;
351 
352   /*
353      Determine starting and ending points of each processor's
354      range of grid values
355   */
356   CHKERRQ(VecGetOwnershipRange(solution,&mybase,&myend));
357 
358   /*
359      Get a pointer to vector data.
360   */
361   CHKERRQ(VecGetArray(solution,&s_localptr));
362 
363   /*
364      Simply write the solution directly into the array locations.
365      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
366   */
367   for (i=mybase; i<myend; i++) {
368     x = h*(PetscReal)i;
369     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
370   }
371 
372   /*
373      Restore vector
374   */
375   CHKERRQ(VecRestoreArray(solution,&s_localptr));
376   return 0;
377 }
378 /* --------------------------------------------------------------------- */
379 /*
380    Monitor - User-provided routine to monitor the solution computed at
381    each timestep.  This example plots the solution and computes the
382    error in two different norms.
383 
384    Input Parameters:
385    ts     - the timestep context
386    step   - the count of the current step (with 0 meaning the
387             initial condition)
388    time   - the current time
389    u      - the solution at this timestep
390    ctx    - the user-provided context for this monitoring routine.
391             In this case we use the application context which contains
392             information about the problem size, workspace and the exact
393             solution.
394 */
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  CHKERRQ(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  CHKERRQ(PetscDrawSetDoubleBuffer(draw));
  CHKERRQ(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep (stored in appctx->solution)
  */
  CHKERRQ(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error.
     Note: this overwrites appctx->solution with (exact - computed);
     that is fine because ExactSolution() rebuilds it on the next call.
  */
  CHKERRQ(VecAXPY(appctx->solution,-1.0,u));
  CHKERRQ(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
  CHKERRQ(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  CHKERRQ(PetscPrintf(appctx->comm,"Timestep %D: time = %g,2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
   */
  /*  if (appctx->debug) {
     CHKERRQ(PetscPrintf(appctx->comm,"Error vector\n"));
     CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
   } */
  return 0;
}
454 /* --------------------------------------------------------------------- */
455 /*
456    RHSFunction - User-provided routine that evalues the right-hand-side
457    function of the ODE.  This routine is set in the main program by
458    calling TSSetRHSFunction().  We compute:
459           global_out = F(global_in)
460 
461    Input Parameters:
462    ts         - timesteping context
463    t          - current time
464    global_in  - vector containing the current iterate
465    ctx        - (optional) user-provided context for function evaluation.
466                 In this case we use the appctx defined above.
467 
468    Output Parameter:
469    global_out - vector containing the newly evaluated function
470 */
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  CHKERRQ(VecGetArray(localwork,&copyptr));

  /* Coefficient 1/(2 h^2 (1+t)^2): the PDE's 1/(2(t+1)^2) factor combined
     with the 1/h^2 of the centered second-difference below */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  CHKERRQ(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
  */
  CHKERRMPI(MPI_Comm_rank(appctx->comm,&rank));
  CHKERRMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0) copyptr[0] = 1.0;
  /* NOTE(review): unlike the comment above, the right-boundary derivative is
     forced to 0 once t >= .5 -- presumably deliberate, to drive the solution
     against the box constraints this example demonstrates; confirm intent */
  if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;

  /*
     Handle the interior nodes where the PDE is replace by finite
     difference operators. (Indexing is into the ghosted local arrays,
     so i-1 and i+1 are valid at the local boundaries.)
  */
  for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));
  CHKERRQ(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  CHKERRQ(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  CHKERRQ(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  /*  if (appctx->debug) {
     CHKERRQ(PetscPrintf(appctx->comm,"RHS function vector\n"));
     CHKERRQ(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
   } */

  return 0;
}
556 /* --------------------------------------------------------------------- */
557 /*
558    RHSJacobian - User-provided routine to compute the Jacobian of
559    the nonlinear right-hand-side function of the ODE.
560 
561    Input Parameters:
562    ts - the TS context
563    t - current time
564    global_in - global input vector
565    dummy - optional user-defined context, as set by TSetRHSJacobian()
566 
567    Output Parameters:
568    AA - Jacobian matrix
569    BB - optionally different preconditioning matrix
570    str - flag indicating matrix structure
571 
572   Notes:
573   RHSJacobian computes entries for the locally owned part of the Jacobian.
574    - Currently, all PETSc parallel matrix formats are partitioned by
575      contiguous chunks of rows across the processors.
576    - Each processor needs to insert only elements that it owns
577      locally (but any non-local elements will be sent to the
578      appropriate processor during matrix assembly).
579    - Always specify global row and columns of matrix entries when
580      using MatSetValues().
581    - Here, we set all entries for a particular row at once.
582    - Note that MatSetValues() uses 0-based row and column numbers
583      in Fortran as well as in C.
584 */
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  CHKERRQ(MatGetOwnershipRange(B,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: the boundary RHS
     entries do not depend on u, so their Jacobian rows are zero.
     mstart/mend are shrunk so the interior loop below skips these rows.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    CHKERRQ(MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    CHKERRQ(MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.  For global row i, `is` is the index of u_i
     in the ghosted local array (the +1 accounts for the left ghost point,
     and cancels the mstart++ done above on the first rank).
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];                                       /* dF_i/du_{i-1} */
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]); /* dF_i/du_i */
    v[2]   = sc*localptr[is];                                       /* dF_i/du_{i+1} */
    CHKERRQ(MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  CHKERRQ(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error. (Set after the first
     assembly so it applies to all subsequent Jacobian evaluations.)
  */
  CHKERRQ(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}
682 
683 /*TEST
684 
685     test:
686       args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
687       requires: !single
688 
689 TEST*/
690