xref: /petsc/src/ts/tutorials/ex2.c (revision 4e278199b78715991f5c71ebbd945c1489263e6c) !
1 
/* Help text printed when the program is run with -help (passed to PetscInitialize below) */
static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\n";
7 
8 /*
9    Concepts: TS^time-dependent nonlinear problems
10    Processors: n
11 */
12 
13 /* ------------------------------------------------------------------------
14 
15    This program solves the PDE
16 
17                u * u_xx
18          u_t = ---------
19                2*(t+1)^2
20 
21     on the domain 0 <= x <= 1, with boundary conditions
22          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
23     and initial condition
24          u(0,x) = 1 + x*x.
25 
26     The exact solution is:
27          u(t,x) = (1 + x*x) * (1 + t)
28 
29     Note that since the solution is linear in time and quadratic in x,
30     the finite difference scheme actually computes the "exact" solution.
31 
32     We use by default the backward Euler method.
33 
34   ------------------------------------------------------------------------- */
35 
36 /*
37    Include "petscts.h" to use the PETSc timestepping routines. Note that
38    this file automatically includes "petscsys.h" and other lower-level
39    PETSc include files.
40 
41    Include the "petscdmda.h" to allow us to use the distributed array data
42    structures to manage the parallel grid.
43 */
44 #include <petscts.h>
45 #include <petscdm.h>
46 #include <petscdmda.h>
47 #include <petscdraw.h>
48 
49 /*
50    User-defined application context - contains data needed by the
51    application-provided callback routines.
52 */
/*
   User-defined application context - passed to every callback (RHSFunction,
   RHSJacobian, Monitor) so they share the grid, work vectors and options.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator shared by all callbacks */
  DM        da;             /* distributed array managing the parallel 1d grid */
  Vec       localwork;      /* local ghosted work vector used by RHSFunction() */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global vector holding the exact solution (reused as the error vector in Monitor) */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* PETSC_TRUE activates debugging printouts */
} AppCtx;
63 
64 /*
65    User-defined routines, provided below.
66 */
67 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
68 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
69 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
70 extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
71 extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
72 
73 int main(int argc,char **argv)
74 {
75   AppCtx         appctx;                 /* user-defined application context */
76   TS             ts;                     /* timestepping context */
77   Mat            A;                      /* Jacobian matrix data structure */
78   Vec            u;                      /* approximate solution vector */
79   PetscInt       time_steps_max = 100;  /* default max timesteps */
80   PetscErrorCode ierr;
81   PetscReal      dt;
82   PetscReal      time_total_max = 100.0; /* default max total time */
83   PetscBool      mymonitor      = PETSC_FALSE;
84   PetscReal      bounds[]       = {1.0, 3.3};
85 
86   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
87      Initialize program and set problem parameters
88      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
89 
90   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
91   ierr = PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);CHKERRQ(ierr);
92 
93   appctx.comm = PETSC_COMM_WORLD;
94   appctx.m    = 60;
95 
96   ierr = PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);CHKERRQ(ierr);
97   ierr = PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);CHKERRQ(ierr);
98   ierr = PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);CHKERRQ(ierr);
99 
100   appctx.h    = 1.0/(appctx.m-1.0);
101 
102   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
103      Create vector data structures
104      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
105 
106   /*
107      Create distributed array (DMDA) to manage parallel grid and vectors
108      and to set up the ghost point communication pattern.  There are M
109      total grid values spread equally among all the processors.
110   */
111   ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);CHKERRQ(ierr);
112   ierr = DMSetFromOptions(appctx.da);CHKERRQ(ierr);
113   ierr = DMSetUp(appctx.da);CHKERRQ(ierr);
114 
115   /*
116      Extract global and local vectors from DMDA; we use these to store the
117      approximate solution.  Then duplicate these for remaining vectors that
118      have the same types.
119   */
120   ierr = DMCreateGlobalVector(appctx.da,&u);CHKERRQ(ierr);
121   ierr = DMCreateLocalVector(appctx.da,&appctx.u_local);CHKERRQ(ierr);
122 
123   /*
124      Create local work vector for use in evaluating right-hand-side function;
125      create global work vector for storing exact solution.
126   */
127   ierr = VecDuplicate(appctx.u_local,&appctx.localwork);CHKERRQ(ierr);
128   ierr = VecDuplicate(u,&appctx.solution);CHKERRQ(ierr);
129 
130   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
131      Create timestepping solver context; set callback routine for
132      right-hand-side function evaluation.
133      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
134 
135   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
136   ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
137   ierr = TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);CHKERRQ(ierr);
138 
139   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140      Set optional user-defined monitoring routine
141      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
142 
143   if (mymonitor) {
144     ierr = TSMonitorSet(ts,Monitor,&appctx,NULL);CHKERRQ(ierr);
145   }
146 
147   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
148      For nonlinear problems, the user can provide a Jacobian evaluation
149      routine (or use a finite differencing approximation).
150 
151      Create matrix data structure; set Jacobian evaluation routine.
152      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
153 
154   ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
155   ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);CHKERRQ(ierr);
156   ierr = MatSetFromOptions(A);CHKERRQ(ierr);
157   ierr = MatSetUp(A);CHKERRQ(ierr);
158   ierr = TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);CHKERRQ(ierr);
159 
160   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
161      Set solution vector and initial timestep
162      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
163 
164   dt   = appctx.h/2.0;
165   ierr = TSSetTimeStep(ts,dt);CHKERRQ(ierr);
166 
167   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
168      Customize timestepping solver:
169        - Set the solution method to be the Backward Euler method.
170        - Set timestepping duration info
171      Then set runtime options, which can override these defaults.
172      For example,
173           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
174      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
175      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
176 
177   ierr = TSSetType(ts,TSBEULER);CHKERRQ(ierr);
178   ierr = TSSetMaxSteps(ts,time_steps_max);CHKERRQ(ierr);
179   ierr = TSSetMaxTime(ts,time_total_max);CHKERRQ(ierr);
180   ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);CHKERRQ(ierr);
181   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
182 
183   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
184      Solve the problem
185      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
186 
187   /*
188      Evaluate initial conditions
189   */
190   ierr = InitialConditions(u,&appctx);CHKERRQ(ierr);
191 
192   /*
193      Run the timestepping solver
194   */
195   ierr = TSSolve(ts,u);CHKERRQ(ierr);
196 
197   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198      Free work space.  All PETSc objects should be destroyed when they
199      are no longer needed.
200      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
201 
202   ierr = TSDestroy(&ts);CHKERRQ(ierr);
203   ierr = VecDestroy(&u);CHKERRQ(ierr);
204   ierr = MatDestroy(&A);CHKERRQ(ierr);
205   ierr = DMDestroy(&appctx.da);CHKERRQ(ierr);
206   ierr = VecDestroy(&appctx.localwork);CHKERRQ(ierr);
207   ierr = VecDestroy(&appctx.solution);CHKERRQ(ierr);
208   ierr = VecDestroy(&appctx.u_local);CHKERRQ(ierr);
209 
210   /*
211      Always call PetscFinalize() before exiting a program.  This routine
212        - finalizes the PETSc libraries as well as MPI
213        - provides summary and diagnostic information if certain runtime
214          options are chosen (e.g., -log_view).
215   */
216   ierr = PetscFinalize();
217   return ierr;
218 }
219 /* --------------------------------------------------------------------- */
220 /*
221    InitialConditions - Computes the solution at the initial time.
222 
223    Input Parameters:
224    u - uninitialized solution vector (global)
225    appctx - user-defined application context
226 
227    Output Parameter:
228    u - vector with solution at initial time (global)
229 */
230 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
231 {
232   PetscScalar    *u_localptr,h = appctx->h,x;
233   PetscInt       i,mybase,myend;
234   PetscErrorCode ierr;
235 
236   /*
237      Determine starting point of each processor's range of
238      grid values.
239   */
240   ierr = VecGetOwnershipRange(u,&mybase,&myend);CHKERRQ(ierr);
241 
242   /*
243     Get a pointer to vector data.
244     - For default PETSc vectors, VecGetArray() returns a pointer to
245       the data array.  Otherwise, the routine is implementation dependent.
246     - You MUST call VecRestoreArray() when you no longer need access to
247       the array.
248     - Note that the Fortran interface to VecGetArray() differs from the
249       C version.  See the users manual for details.
250   */
251   ierr = VecGetArray(u,&u_localptr);CHKERRQ(ierr);
252 
253   /*
254      We initialize the solution array by simply writing the solution
255      directly into the array locations.  Alternatively, we could use
256      VecSetValues() or VecSetValuesLocal().
257   */
258   for (i=mybase; i<myend; i++) {
259     x = h*(PetscReal)i; /* current location in global grid */
260     u_localptr[i-mybase] = 1.0 + x*x;
261   }
262 
263   /*
264      Restore vector
265   */
266   ierr = VecRestoreArray(u,&u_localptr);CHKERRQ(ierr);
267 
268   /*
269      Print debugging information if desired
270   */
271   if (appctx->debug) {
272     ierr = PetscPrintf(appctx->comm,"initial guess vector\n");CHKERRQ(ierr);
273     ierr = VecView(u,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
274   }
275 
276   return 0;
277 }
278 /* --------------------------------------------------------------------- */
279 /*
280    ExactSolution - Computes the exact solution at a given time.
281 
282    Input Parameters:
283    t - current time
284    solution - vector in which exact solution will be computed
285    appctx - user-defined application context
286 
287    Output Parameter:
288    solution - vector with the newly computed exact solution
289 */
290 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
291 {
292   PetscScalar    *s_localptr,h = appctx->h,x;
293   PetscInt       i,mybase,myend;
294   PetscErrorCode ierr;
295 
296   /*
297      Determine starting and ending points of each processor's
298      range of grid values
299   */
300   ierr = VecGetOwnershipRange(solution,&mybase,&myend);CHKERRQ(ierr);
301 
302   /*
303      Get a pointer to vector data.
304   */
305   ierr = VecGetArray(solution,&s_localptr);CHKERRQ(ierr);
306 
307   /*
308      Simply write the solution directly into the array locations.
309      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
310   */
311   for (i=mybase; i<myend; i++) {
312     x = h*(PetscReal)i;
313     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
314   }
315 
316   /*
317      Restore vector
318   */
319   ierr = VecRestoreArray(solution,&s_localptr);CHKERRQ(ierr);
320   return 0;
321 }
322 /* --------------------------------------------------------------------- */
323 /*
324    Monitor - User-provided routine to monitor the solution computed at
325    each timestep.  This example plots the solution and computes the
326    error in two different norms.
327 
328    Input Parameters:
329    ts     - the timestep context
330    step   - the count of the current step (with 0 meaning the
331             initial condition)
332    time   - the current time
333    u      - the solution at this timestep
334    ctx    - the user-provided context for this monitoring routine.
335             In this case we use the application context which contains
336             information about the problem size, workspace and the exact
337             solution.
338 */
339 PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
340 {
341   AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
342   PetscErrorCode ierr;
343   PetscReal      en2,en2s,enmax;
344   PetscDraw      draw;
345 
346   /*
347      We use the default X windows viewer
348              PETSC_VIEWER_DRAW_(appctx->comm)
349      that is associated with the current communicator. This saves
350      the effort of calling PetscViewerDrawOpen() to create the window.
351      Note that if we wished to plot several items in separate windows we
352      would create each viewer with PetscViewerDrawOpen() and store them in
353      the application context, appctx.
354 
355      PetscReal buffering makes graphics look better.
356   */
357   ierr = PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);CHKERRQ(ierr);
358   ierr = PetscDrawSetDoubleBuffer(draw);CHKERRQ(ierr);
359   ierr = VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));CHKERRQ(ierr);
360 
361   /*
362      Compute the exact solution at this timestep
363   */
364   ierr = ExactSolution(time,appctx->solution,appctx);CHKERRQ(ierr);
365 
366   /*
367      Print debugging information if desired
368   */
369   if (appctx->debug) {
370     ierr = PetscPrintf(appctx->comm,"Computed solution vector\n");CHKERRQ(ierr);
371     ierr = VecView(u,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
372     ierr = PetscPrintf(appctx->comm,"Exact solution vector\n");CHKERRQ(ierr);
373     ierr = VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
374   }
375 
376   /*
377      Compute the 2-norm and max-norm of the error
378   */
379   ierr = VecAXPY(appctx->solution,-1.0,u);CHKERRQ(ierr);
380   ierr = VecNorm(appctx->solution,NORM_2,&en2);CHKERRQ(ierr);
381   en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
382   ierr = VecNorm(appctx->solution,NORM_MAX,&enmax);CHKERRQ(ierr);
383 
384   /*
385      PetscPrintf() causes only the first processor in this
386      communicator to print the timestep information.
387   */
388   ierr = PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g  max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);CHKERRQ(ierr);
389 
390   /*
391      Print debugging information if desired
392   */
393   if (appctx->debug) {
394     ierr = PetscPrintf(appctx->comm,"Error vector\n");CHKERRQ(ierr);
395     ierr = VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
396   }
397   return 0;
398 }
399 /* --------------------------------------------------------------------- */
400 /*
401    RHSFunction - User-provided routine that evaluates the right-hand-side
402    function of the ODE.  This routine is set in the main program by
403    calling TSSetRHSFunction().  We compute:
404           global_out = F(global_in)
405 
406    Input Parameters:
407    ts         - timestepping context
408    t          - current time
409    global_in  - vector containing the current iterate
410    ctx        - (optional) user-provided context for function evaluation.
411                 In this case we use the appctx defined above.
412 
413    Output Parameter:
414    global_out - vector containing the newly evaluated function
415 */
/*
   Evaluates global_out = F(t, global_in) = u*u_xx / (2*(t+1)^2) on the
   locally owned nodes, using a ghosted local copy of the input so the
   second difference can read neighboring processors' values.
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscErrorCode    ierr;
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /*
     Scatter ghost points to the local vector via the 2-step process
     DMGlobalToLocalBegin()/DMGlobalToLocalEnd(); code placed between the
     two calls can overlap computation with communication.
  */
  ierr = DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);CHKERRQ(ierr);

  /* Read-only access to the ghosted INPUT values */
  ierr = VecGetArrayRead(local_in,&localptr);CHKERRQ(ierr);

  /* Writable access to the local OUTPUT work array */
  ierr = VecGetArray(localwork,&copyptr);CHKERRQ(ierr);

  /* sc = 1/(2*h^2*(1+t)^2): combines the 1/h^2 of the second difference
     with the 1/(2*(t+1)^2) factor of the PDE */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /* Size of the ghosted local vector (owned points plus ghost points) */
  ierr = VecGetLocalSize(local_in,&localsize);CHKERRQ(ierr);

  /*
     Boundary conditions: differentiating u(t,boundary) = g(t,boundary)
     in time gives u_t(t,boundary) = g_t(t,boundary).  Here
        u(t,0) = t + 1   =>  u_t(t,0) = 1   (first node, owned by rank 0)
        u(t,1) = 2t + 2  =>  u_t(t,1) = 2   (last node, owned by the last rank)
  */
  ierr = MPI_Comm_rank(appctx->comm,&rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(appctx->comm,&size);CHKERRMPI(ierr);
  if (!rank)          copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Interior nodes: the PDE's u*u_xx term via the standard 3-point
     second difference, scaled by sc.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /* Restore array access */
  ierr = VecRestoreArrayRead(local_in,&localptr);CHKERRQ(ierr);
  ierr = VecRestoreArray(localwork,&copyptr);CHKERRQ(ierr);

  /* Insert the local OUTPUT values into the global output vector */
  ierr = DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);CHKERRQ(ierr);

  /* Print debugging information if desired */
  if (appctx->debug) {
    ierr = PetscPrintf(appctx->comm,"RHS function vector\n");CHKERRQ(ierr);
    ierr = VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  return 0;
}
502 /* --------------------------------------------------------------------- */
503 /*
504    RHSJacobian - User-provided routine to compute the Jacobian of
505    the nonlinear right-hand-side function of the ODE.
506 
507    Input Parameters:
508    ts - the TS context
509    t - current time
510    global_in - global input vector
511    ctx - (optional) user-defined context, as set by TSSetRHSJacobian()
512 
513    Output Parameters:
514    AA - Jacobian matrix
515    BB - optionally different preconditioning matrix
516    str - flag indicating matrix structure
517 
518   Notes:
519   RHSJacobian computes entries for the locally owned part of the Jacobian.
520    - Currently, all PETSc parallel matrix formats are partitioned by
521      contiguous chunks of rows across the processors.
522    - Each processor needs to insert only elements that it owns
523      locally (but any non-local elements will be sent to the
524      appropriate processor during matrix assembly).
525    - Always specify global row and columns of matrix entries when
526      using MatSetValues().
527    - Here, we set all entries for a particular row at once.
528    - Note that MatSetValues() uses 0-based row and column numbers
529      in Fortran as well as in C.
530 */
/*
   Computes the Jacobian of F(t,u) = u*u_xx/(2*(t+1)^2) for the locally
   owned rows of BB (and assembles AA if it is a different matrix).
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;                      /* one matrix row of up to 3 entries */
  const PetscScalar *localptr;
  PetscErrorCode    ierr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /*
     Scatter ghost points to the local vector via the 2-step process
     DMGlobalToLocalBegin()/DMGlobalToLocalEnd(); code placed between the
     two calls can overlap computation with communication.
  */
  ierr = DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);CHKERRQ(ierr);

  /* Read-only access to the ghosted input values */
  ierr = VecGetArrayRead(local_in,&localptr);CHKERRQ(ierr);

  /* Locally owned row range of the matrix */
  ierr   = MatGetOwnershipRange(BB,&mstarts,&mends);CHKERRQ(ierr);
  mstart = mstarts; mend = mends;

  /*
     Boundary rows: the boundary values evolve independently of the
     interior unknowns, so their Jacobian rows hold a single zero on the
     diagonal.  mstart/mend are then shrunk to exclude these rows from
     the interior loop below.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    ierr = MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);CHKERRQ(ierr);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    ierr = MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);CHKERRQ(ierr);
  }

  /*
     Interior rows, one at a time with MatSetValues() using global
     indices.  For F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i):
        dF_i/du_{i-1} = sc*u_i
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc*u_i
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    /* is = position of global node i in the ghosted local array: interior
       ranks have one leading ghost point; on rank 0 mstart was advanced
       past the boundary row, so the +1 offset lands on node i either way */
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    ierr   = MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);CHKERRQ(ierr);
  }

  /* Restore array access */
  ierr = VecRestoreArrayRead(local_in,&localptr);CHKERRQ(ierr);

  /*
     Assemble the matrix via the 2-step process MatAssemblyBegin()/
     MatAssemblyEnd(); computations placed between the two calls can
     overlap with the communication of off-processor entries.
  */
  ierr = MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (BB != AA) {
    ierr = MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }

  /*
     Set an option to indicate that we will never add a new nonzero
     location to the matrix.  If we do, it will generate an error.
  */
  ierr = MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);

  return 0;
}
633 
634 /*TEST
635 
636     test:
637       args: -nox -ts_dt 10 -mymonitor
638       nsize: 2
639       requires: !single
640 
641     test:
642       suffix: tut_1
643       nsize: 1
644       args: -ts_max_steps 10 -ts_monitor
645 
646     test:
647       suffix: tut_2
648       nsize: 4
649       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
650 
651     test:
652       suffix: tut_3
653       nsize: 4
654       args: -ts_max_steps 10 -ts_monitor -M 128
655 
656 TEST*/
657 
658