xref: /petsc/src/ts/tutorials/ex2.c (revision 5f80ce2ab25dff0f4601e710601cbbcecf323266)
1 
2 static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
3 timestepping.  Runtime options include:\n\
4   -M <xg>, where <xg> = number of grid points\n\
5   -debug : Activate debugging printouts\n\
6   -nox   : Deactivate x-window graphics\n\n";
7 
8 /*
9    Concepts: TS^time-dependent nonlinear problems
10    Processors: n
11 */
12 
13 /* ------------------------------------------------------------------------
14 
15    This program solves the PDE
16 
17                u * u_xx
18          u_t = ---------
19                2*(t+1)^2
20 
21     on the domain 0 <= x <= 1, with boundary conditions
22          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
23     and initial condition
24          u(0,x) = 1 + x*x.
25 
26     The exact solution is:
27          u(t,x) = (1 + x*x) * (1 + t)
28 
29     Note that since the solution is linear in time and quadratic in x,
30     the finite difference scheme actually computes the "exact" solution.
31 
32     We use by default the backward Euler method.
33 
34   ------------------------------------------------------------------------- */
35 
36 /*
37    Include "petscts.h" to use the PETSc timestepping routines. Note that
38    this file automatically includes "petscsys.h" and other lower-level
39    PETSc include files.
40 
41    Include the "petscdmda.h" to allow us to use the distributed array data
42    structures to manage the parallel grid.
43 */
44 #include <petscts.h>
45 #include <petscdm.h>
46 #include <petscdmda.h>
47 #include <petscdraw.h>
48 
49 /*
50    User-defined application context - contains data needed by the
51    application-provided callback routines.
52 */
53 typedef struct {
54   MPI_Comm  comm;           /* communicator */
55   DM        da;             /* distributed array data structure */
56   Vec       localwork;      /* local ghosted work vector */
57   Vec       u_local;        /* local ghosted approximate solution vector */
58   Vec       solution;       /* global exact solution vector */
59   PetscInt  m;              /* total number of grid points */
60   PetscReal h;              /* mesh width: h = 1/(m-1) */
61   PetscBool debug;          /* flag (1 indicates activation of debugging printouts) */
62 } AppCtx;
63 
64 /*
65    User-defined routines, provided below.
66 */
67 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
68 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
69 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
70 extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
71 extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
72 
int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;  /* default max timesteps */
  PetscErrorCode ierr;
  PetscReal      dt;                     /* initial timestep, set to h/2 below */
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscBool      mymonitor      = PETSC_FALSE; /* true if -mymonitor was given */
  PetscReal      bounds[]       = {1.0, 3.3};  /* y-axis bounds for the draw viewer */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  /* Fix the vertical axis of the default draw viewer so successive plots share a scale */
  CHKERRQ(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  /* -M overrides the default grid size; -debug/-mymonitor are on/off flags */
  CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  CHKERRQ(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));

  appctx.h    = 1.0/(appctx.m-1.0); /* mesh width on [0,1] with m points */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  CHKERRQ(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  CHKERRQ(DMSetFromOptions(appctx.da));
  CHKERRQ(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  CHKERRQ(DMCreateGlobalVector(appctx.da,&u));
  CHKERRQ(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  CHKERRQ(VecDuplicate(appctx.u_local,&appctx.localwork));
  CHKERRQ(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSCreate(PETSC_COMM_WORLD,&ts));
  CHKERRQ(TSSetProblemType(ts,TS_NONLINEAR));
  CHKERRQ(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    CHKERRQ(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  CHKERRQ(MatSetFromOptions(A));
  CHKERRQ(MatSetUp(A));
  /* The same matrix A serves as both the Jacobian and the preconditioning matrix */
  CHKERRQ(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  CHKERRQ(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSSetType(ts,TSBEULER));
  CHKERRQ(TSSetMaxSteps(ts,time_steps_max));
  CHKERRQ(TSSetMaxTime(ts,time_total_max));
  CHKERRQ(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  CHKERRQ(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  CHKERRQ(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  CHKERRQ(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSDestroy(&ts));
  CHKERRQ(VecDestroy(&u));
  CHKERRQ(MatDestroy(&A));
  CHKERRQ(DMDestroy(&appctx.da));
  CHKERRQ(VecDestroy(&appctx.localwork));
  CHKERRQ(VecDestroy(&appctx.solution));
  CHKERRQ(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
219 /* --------------------------------------------------------------------- */
220 /*
221    InitialConditions - Computes the solution at the initial time.
222 
223    Input Parameters:
224    u - uninitialized solution vector (global)
225    appctx - user-defined application context
226 
227    Output Parameter:
228    u - vector with solution at initial time (global)
229 */
230 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
231 {
232   PetscScalar    *u_localptr,h = appctx->h,x;
233   PetscInt       i,mybase,myend;
234 
235   /*
236      Determine starting point of each processor's range of
237      grid values.
238   */
239   CHKERRQ(VecGetOwnershipRange(u,&mybase,&myend));
240 
241   /*
242     Get a pointer to vector data.
243     - For default PETSc vectors, VecGetArray() returns a pointer to
244       the data array.  Otherwise, the routine is implementation dependent.
245     - You MUST call VecRestoreArray() when you no longer need access to
246       the array.
247     - Note that the Fortran interface to VecGetArray() differs from the
248       C version.  See the users manual for details.
249   */
250   CHKERRQ(VecGetArray(u,&u_localptr));
251 
252   /*
253      We initialize the solution array by simply writing the solution
254      directly into the array locations.  Alternatively, we could use
255      VecSetValues() or VecSetValuesLocal().
256   */
257   for (i=mybase; i<myend; i++) {
258     x = h*(PetscReal)i; /* current location in global grid */
259     u_localptr[i-mybase] = 1.0 + x*x;
260   }
261 
262   /*
263      Restore vector
264   */
265   CHKERRQ(VecRestoreArray(u,&u_localptr));
266 
267   /*
268      Print debugging information if desired
269   */
270   if (appctx->debug) {
271     CHKERRQ(PetscPrintf(appctx->comm,"initial guess vector\n"));
272     CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
273   }
274 
275   return 0;
276 }
277 /* --------------------------------------------------------------------- */
278 /*
279    ExactSolution - Computes the exact solution at a given time.
280 
281    Input Parameters:
282    t - current time
283    solution - vector in which exact solution will be computed
284    appctx - user-defined application context
285 
286    Output Parameter:
287    solution - vector with the newly computed exact solution
288 */
289 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
290 {
291   PetscScalar    *s_localptr,h = appctx->h,x;
292   PetscInt       i,mybase,myend;
293 
294   /*
295      Determine starting and ending points of each processor's
296      range of grid values
297   */
298   CHKERRQ(VecGetOwnershipRange(solution,&mybase,&myend));
299 
300   /*
301      Get a pointer to vector data.
302   */
303   CHKERRQ(VecGetArray(solution,&s_localptr));
304 
305   /*
306      Simply write the solution directly into the array locations.
307      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
308   */
309   for (i=mybase; i<myend; i++) {
310     x = h*(PetscReal)i;
311     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
312   }
313 
314   /*
315      Restore vector
316   */
317   CHKERRQ(VecRestoreArray(solution,&s_localptr));
318   return 0;
319 }
320 /* --------------------------------------------------------------------- */
321 /*
322    Monitor - User-provided routine to monitor the solution computed at
323    each timestep.  This example plots the solution and computes the
324    error in two different norms.
325 
326    Input Parameters:
327    ts     - the timestep context
328    step   - the count of the current step (with 0 meaning the
329             initial condition)
330    time   - the current time
331    u      - the solution at this timestep
332    ctx    - the user-provided context for this monitoring routine.
333             In this case we use the application context which contains
334             information about the problem size, workspace and the exact
335             solution.
336 */
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;            /* 2-norm, scaled 2-norm, max-norm of error */
  PetscDraw      draw;

  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  CHKERRQ(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  CHKERRQ(PetscDrawSetDoubleBuffer(draw));
  CHKERRQ(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  CHKERRQ(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    CHKERRQ(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    CHKERRQ(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error.
     Note: the AXPY overwrites appctx->solution with the error
     (exact - computed); it is recomputed by ExactSolution() on
     the next monitor call, so this is safe.
  */
  CHKERRQ(VecAXPY(appctx->solution,-1.0,u));
  CHKERRQ(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
  CHKERRQ(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  CHKERRQ(PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g  max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"Error vector\n"));
    CHKERRQ(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }
  return 0;
}
396 /* --------------------------------------------------------------------- */
397 /*
   RHSFunction - User-provided routine that evaluates the right-hand-side
399    function of the ODE.  This routine is set in the main program by
400    calling TSSetRHSFunction().  We compute:
401           global_out = F(global_in)
402 
403    Input Parameters:
   ts         - timestepping context
405    t          - current time
406    global_in  - vector containing the current iterate
407    ctx        - (optional) user-provided context for function evaluation.
408                 In this case we use the appctx defined above.
409 
410    Output Parameter:
411    global_out - vector containing the newly evaluated function
412 */
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
      Access directly the values in our local INPUT work array
      (read-only; includes ghost values from neighboring processes)
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  CHKERRQ(VecGetArray(localwork,&copyptr));

  /* Scaling factor 1/(2 h^2 (1+t)^2) from the PDE u_t = u u_xx / (2 (t+1)^2)
     combined with the centered second-difference 1/h^2 */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
      (localsize includes the ghost points of the local form)
  */
  CHKERRQ(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2

     Only the first process owns the left boundary node and only the last
     process owns the right boundary node.
  */
  CHKERRMPI(MPI_Comm_rank(appctx->comm,&rank));
  CHKERRMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)          copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replace by finite
     difference operators:  F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2 u_i)
  */
  for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));
  CHKERRQ(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  CHKERRQ(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  CHKERRQ(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    CHKERRQ(PetscPrintf(appctx->comm,"RHS function vector\n"));
    CHKERRQ(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
498 /* --------------------------------------------------------------------- */
499 /*
500    RHSJacobian - User-provided routine to compute the Jacobian of
501    the nonlinear right-hand-side function of the ODE.
502 
503    Input Parameters:
504    ts - the TS context
505    t - current time
506    global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()
508 
509    Output Parameters:
510    AA - Jacobian matrix
511    BB - optionally different preconditioning matrix
512    str - flag indicating matrix structure
513 
514   Notes:
515   RHSJacobian computes entries for the locally owned part of the Jacobian.
516    - Currently, all PETSc parallel matrix formats are partitioned by
517      contiguous chunks of rows across the processors.
518    - Each processor needs to insert only elements that it owns
519      locally (but any non-local elements will be sent to the
520      appropriate processor during matrix assembly).
521    - Always specify global row and columns of matrix entries when
522      using MatSetValues().
523    - Here, we set all entries for a particular row at once.
524    - Note that MatSetValues() uses 0-based row and column numbers
525      in Fortran as well as in C.
526 */
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data (read-only; includes ghost values)
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  CHKERRQ(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data.
     The boundary rows are zero (u_t at the boundary is constant in u),
     and mstart/mend are shrunk so the interior loop below skips them.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    CHKERRQ(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    CHKERRQ(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    /* is maps global row i into the ghosted local array: the +1 accounts
       for the left ghost point (or, on the first process, for the skipped
       boundary row since mstart was incremented above) */
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    CHKERRQ(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  CHKERRQ(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    CHKERRQ(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    CHKERRQ(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  CHKERRQ(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}
628 
629 /*TEST
630 
631     test:
632       args: -nox -ts_dt 10 -mymonitor
633       nsize: 2
634       requires: !single
635 
636     test:
637       suffix: tut_1
638       nsize: 1
639       args: -ts_max_steps 10 -ts_monitor
640 
641     test:
642       suffix: tut_2
643       nsize: 4
644       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
645 
646     test:
647       suffix: tut_3
648       nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128
650 
651 TEST*/
652