
static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -mymonitor : Activate a custom monitoring routine\n\
  -nox   : Deactivate x-window graphics\n\n";
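
/*
   Example of a typical run (the MPI launcher name is illustrative; any
   launcher works):
       mpiexec -n 2 ./ex2 -M 100 -ts_monitor -mymonitor
*/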

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
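
/*
   As a quick check (added for exposition) that u(t,x) = (1 + x*x)*(1 + t)
   really solves the problem above, substitute it into the PDE:

       u_t  = 1 + x*x
       u_xx = 2*(1 + t)

       u * u_xx      (1 + x*x)*(1 + t) * 2*(1 + t)
       ---------  =  -----------------------------  =  1 + x*x  =  u_t.
       2*(t+1)^2              2*(t+1)^2

   The boundary values u(t,0) = t + 1 and u(t,1) = 2*(t + 1) and the
   initial condition u(0,x) = 1 + x*x follow by direct substitution.
*/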

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" so that we can use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* flag (PETSC_TRUE activates debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscBool      mymonitor      = PETSC_FALSE;
  PetscReal      bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));

  appctx.h = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set the initial timestep.  We take dt = h/2; since backward Euler is
     unconditionally stable, this choice is made for accuracy rather than
     stability.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal(); a sketch of that alternative
     follows the loop below.
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }
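
  /*
     A sketch of the VecSetValues() alternative mentioned above (shown for
     exposition, not executed here).  Note that VecSetValues() takes GLOBAL
     indices, must not be mixed with an outstanding VecGetArray() on the
     same vector, and must be followed by the assembly calls:

         PetscScalar val;
         for (i=mybase; i<myend; i++) {
           x   = h*(PetscReal)i;
           val = 1.0 + x*x;
           PetscCall(VecSetValues(u,1,&i,&val,INSERT_VALUES));
         }
         PetscCall(VecAssemblyBegin(u));
         PetscCall(VecAssemblyEnd(u));
  */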

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes the graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution,-1.0,u));
  PetscCall(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2; /* scale by sqrt(h) so the discrete 2-norm approximates the continuous L2 norm */
  PetscCall(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm,"Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1,   so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
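  /*
     (Added for exposition.)  On the interior nodes the second derivative
     is approximated by the standard centered difference
         u_xx(x_i) ~ (u_{i+1} - 2*u_i + u_{i-1}) / h^2,
     so with sc = 1/(2*h^2*(1+t)^2) as set above, the discrete right-hand
     side evaluated below is
         F_i = u_i * (u_{i+1} + u_{i-1} - 2*u_i) * sc,
     i.e. the finite difference analogue of u*u_xx/(2*(t+1)^2).  The index
     i runs over the ghosted local array, so i-1 and i+1 are always valid.
  */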
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
    PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
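  /*
     (Added for exposition.)  Differentiating the interior residual
         F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
     with sc = 1/(2*h^2*(1+t)^2) as set just below, gives the three
     nonzero Jacobian entries per row used in the loop:
         dF_i/du_{i-1} = sc * u_i
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc * u_i
     The local index is = i - mstart + 1 maps the global row i to the
     corresponding entry of the ghosted local array localptr (the boundary
     adjustment to mstart above makes this work on the first process too).
  */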
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/