
static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\
  -ul <ul> : lower bound\n\
  -uh <uh> : upper bound\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Concepts: TS^Variational inequality nonlinear solver
   Processors: n
*/

/* ------------------------------------------------------------------------

   This is a variation of ex2.c to solve the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    with box constraints on the interior grid points
    ul <= u(t,x) <= uh for x != 0,1
    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
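
/*
   A note on the spatial discretization (as implemented in RHSFunction()
   below): using the standard second-order centered difference for u_xx on
   a uniform grid of mesh width h, the semi-discrete system at an interior
   node i is

        du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2)

   while at the two boundary nodes du/dt is set from the time derivative
   of the Dirichlet data.
*/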

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" so that we can use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* flag (PETSC_TRUE indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  Vec            r;                      /* residual vector */
  PetscInt       time_steps_max = 1000;  /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  Vec            xl,xu;                  /* lower and upper bounds on variables */
  PetscScalar    ul = 0.0,uh = 3.0;
  PetscBool      mymonitor;
  PetscReal      bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(PetscInitialize(&argc,&argv,(char*)0,help));
  CHKERRQ(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));
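  /*
     The call above fixes the y-axis limits of the default draw viewer to
     [1.0, 3.3] (the bounds array), so successive frames of the solution
     are plotted on a common scale.
  */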

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  CHKERRQ(PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL));
  CHKERRQ(PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));
  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  CHKERRQ(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  CHKERRQ(DMSetFromOptions(appctx.da));
  CHKERRQ(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  CHKERRQ(DMCreateGlobalVector(appctx.da,&u));
  CHKERRQ(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating the right-hand-side function;
     create global work vector for storing the exact solution.
  */
  CHKERRQ(VecDuplicate(appctx.u_local,&appctx.localwork));
  CHKERRQ(VecDuplicate(u,&appctx.solution));

  /* Create residual vector */
  CHKERRQ(VecDuplicate(u,&r));
  /* Create lower and upper bound vectors */
  CHKERRQ(VecDuplicate(u,&xl));
  CHKERRQ(VecDuplicate(u,&xu));
  CHKERRQ(SetBounds(xl,xu,ul,uh,&appctx));
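
  /*
     A sample invocation exercising the bounds (the specific values are
     illustrative only; all options are defined in this file):
         mpiexec -n 2 ./ex21 -M 60 -ul 0.5 -uh 2.5 -snes_type vinewtonrsls -mymonitor
  */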

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSCreate(PETSC_COMM_WORLD,&ts));
  CHKERRQ(TSSetProblemType(ts,TS_NONLINEAR));
  CHKERRQ(TSSetRHSFunction(ts,r,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    CHKERRQ(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  CHKERRQ(MatSetFromOptions(A));
  CHKERRQ(MatSetUp(A));
  CHKERRQ(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));
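  /*
     To try a finite-difference Jacobian instead of the analytic one above,
     one can run with -snes_fd, which has the inner SNES approximate the
     Jacobian by finite differences (slow, but useful for verification).
  */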

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  CHKERRQ(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSSetType(ts,TSBEULER));
  CHKERRQ(TSSetMaxSteps(ts,time_steps_max));
  CHKERRQ(TSSetMaxTime(ts,time_total_max));
  CHKERRQ(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bounds on the solution vector for each time step */
  CHKERRQ(TSVISetVariableBounds(ts,xl,xu));
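  /*
     The bounds set by TSVISetVariableBounds() are enforced when the inner
     nonlinear solver is a variational-inequality SNES, e.g. with
     -snes_type vinewtonrsls (as in the test below) or vinewtonssls.
  */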
  CHKERRQ(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  CHKERRQ(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  CHKERRQ(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(VecDestroy(&r));
  CHKERRQ(VecDestroy(&xl));
  CHKERRQ(VecDestroy(&xu));
  CHKERRQ(TSDestroy(&ts));
  CHKERRQ(VecDestroy(&u));
  CHKERRQ(MatDestroy(&A));
  CHKERRQ(DMDestroy(&appctx.da));
  CHKERRQ(VecDestroy(&appctx.localwork));
  CHKERRQ(VecDestroy(&appctx.solution));
  CHKERRQ(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  CHKERRQ(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  CHKERRQ(VecGetOwnershipRange(u,&mybase,&myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  CHKERRQ(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"initial guess vector\n"));
    CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}

/* --------------------------------------------------------------------- */
/*
  SetBounds - Sets the lower and upper bounds on the interior points.
  The two boundary points are left unconstrained (-infinity, +infinity),
  since the Dirichlet boundary values may lie outside [ul,uh].

  Input Parameters:
  xl - vector of lower bounds
  xu - vector of upper bounds
  ul - constant lower bound for all variables
  uh - constant upper bound for all variables
  appctx - application context
 */
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx)
{
  PetscScalar       *l,*u;
  PetscMPIInt       rank,size;
  PetscInt          localsize;

  PetscFunctionBeginUser;
  CHKERRQ(VecSet(xl,ul));
  CHKERRQ(VecSet(xu,uh));
  CHKERRQ(VecGetLocalSize(xl,&localsize));
  CHKERRQ(VecGetArray(xl,&l));
  CHKERRQ(VecGetArray(xu,&u));

  CHKERRMPI(MPI_Comm_rank(appctx->comm,&rank));
  CHKERRMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0) {
    l[0] = -PETSC_INFINITY;
    u[0] =  PETSC_INFINITY;
  }
  if (rank == size-1) {
    l[localsize-1] = -PETSC_INFINITY;
    u[localsize-1] =  PETSC_INFINITY;
  }
  CHKERRQ(VecRestoreArray(xl,&l));
  CHKERRQ(VecRestoreArray(xu,&u));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  CHKERRQ(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  CHKERRQ(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  CHKERRQ(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  CHKERRQ(PetscDrawSetDoubleBuffer(draw));
  CHKERRQ(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  CHKERRQ(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  CHKERRQ(VecAXPY(appctx->solution,-1.0,u));
  CHKERRQ(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the discrete 2-norm by sqrt(h) to approximate the continuous L2 norm */
  CHKERRQ(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  CHKERRQ(PetscPrintf(appctx->comm,"Timestep %D: time = %g, 2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  /*  if (appctx->debug) {
     CHKERRQ(PetscPrintf(appctx->comm,"Error vector\n"));
     CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
   } */
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  CHKERRQ(VecGetArray(localwork,&copyptr));

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
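  /* sc combines the 1/h^2 factor from the centered difference with the
     1/(2*(1+t)^2) factor from the PDE coefficient */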

  /*
      Evaluate our function on the nodes owned by this processor
  */
  CHKERRQ(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  CHKERRMPI(MPI_Comm_rank(appctx->comm,&rank));
  CHKERRMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;
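  /*
     Note that u_t at x = 1 is set to 2 only for t < 0.5 and to 0 afterwards,
     deviating from the exact boundary data; this freezes the right boundary
     value at u = 3, apparently so that the growing interior solution runs
     into the upper bound uh and the variational-inequality solver is
     exercised.
  */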

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));
  CHKERRQ(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  CHKERRQ(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  CHKERRQ(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  /*  if (appctx->debug) {
     CHKERRQ(PetscPrintf(appctx->comm,"RHS function vector\n"));
     CHKERRQ(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
   } */

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B  - optionally different matrix used to construct the preconditioner

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  CHKERRQ(MatGetOwnershipRange(B,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    CHKERRQ(MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    CHKERRQ(MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
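  /*
     For the interior stencil the RHS at node i is
         F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
     so differentiating gives the three nonzero Jacobian entries per row:
         dF_i/du_{i-1} = sc*u_i
         dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc*u_i
     which are exactly the values v[0], v[1], v[2] set below.
  */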
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    CHKERRQ(MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  CHKERRQ(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  CHKERRQ(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/