
static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -mymonitor : Activate the user-defined monitoring routine\n\
  -nox   : Deactivate x-window graphics\n\n";
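
/*
   Example command line (illustrative only; the executable name and process
   count are placeholders for however this tutorial is built and launched):
       mpiexec -n 2 ./ex2 -ts_max_steps 10 -ts_monitor
*/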

/*
   Concepts: TS^time-dependent nonlinear problems
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    By default, we use the backward Euler method.

  ------------------------------------------------------------------------- */
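
/*
   Discretization note (restating what RHSFunction() and RHSJacobian() below
   implement): the spatial derivative is replaced by the standard centered
   difference on a uniform grid with spacing h = 1/(m-1), giving the
   semi-discrete system

        du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*(1+t)^2*h^2),   i = 1,...,m-2,

   while the boundary values satisfy du_0/dt = 1 and du_{m-1}/dt = 2, obtained
   by differentiating the boundary conditions u(t,0) = t+1 and u(t,1) = 2*t+2
   in time.
*/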

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;  /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscBool      mymonitor      = PETSC_FALSE;
  PetscReal      bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));
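
  /*
     In the DMDACreate1d() call above, the two '1' arguments specify one
     degree of freedom per grid point and a stencil width of one; the single
     ghost point on each side is exactly what the three-point difference
     stencil in RHSFunction() and RHSJacobian() needs.
  */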

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));
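
  /*
     Note: instead of the analytic Jacobian provided by RHSJacobian(), the
     Jacobian could be approximated with finite differences at runtime, e.g.
     via the -snes_fd option (mentioned here only as a pointer; it is not the
     default for this example).
  */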

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));
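
  /*
     Tying the initial step to the mesh width is only a convenient choice of
     starting resolution; the implicit backward Euler method selected below
     does not impose the severe step-size restriction an explicit scheme
     would need for this diffusion-type problem, so the step size here is an
     accuracy consideration rather than a stability requirement.
  */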

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }
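
  /*
     Note: VecGetArray() exposes only the locally owned block of the global
     vector, so in the loop above the global index i is shifted by mybase to
     address the local array.
  */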

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
349      the application context, appctx.
350 
351      PetscReal buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution,-1.0,u));
  PetscCall(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
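
  /*
     The sqrt(h) factor above makes the discrete 2-norm approximate the
     continuous L2 norm of the error:  ||e||_L2 ~= sqrt( h * sum_i e_i^2 ).
  */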
  PetscCall(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g  max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
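  /*
     Here sc is the common factor 1/(2*h^2*(1+t)^2) that multiplies
     u_i*(u_{i+1} - 2*u_i + u_{i-1}) in the discretized right-hand side
     (cf. the discretization note near the top of this file).
  */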

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes, where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
    PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - (optional) user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and column numbers of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }
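
  /*
     The boundary rows are zero because the boundary equations are
     u_t(t,0) = 1 and u_t(t,1) = 2, which do not depend on u; their
     derivatives with respect to the solution therefore vanish.
  */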

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
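  /*
     For an interior node the right-hand side is F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i),
     so the only nonzero Jacobian entries in row i are
         dF_i/du_{i-1} = sc*u_i
         dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc*u_i
     The offset 'is' maps the global row i into the local ghosted array
     localptr, whose entry 0 is the point immediately to the left of the
     first row handled in this loop (a ghost point, or the physical boundary
     point on the first process).
  */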
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/