xref: /petsc/src/ts/tutorials/ex2.c (revision 327415f76d85372a4417cf1aaa14db707d4d6c04)
1c4762a1bSJed Brown 
2c4762a1bSJed Brown static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
3c4762a1bSJed Brown timestepping.  Runtime options include:\n\
4c4762a1bSJed Brown   -M <xg>, where <xg> = number of grid points\n\
5c4762a1bSJed Brown   -debug : Activate debugging printouts\n\
6c4762a1bSJed Brown   -nox   : Deactivate x-window graphics\n\n";
7c4762a1bSJed Brown 
8c4762a1bSJed Brown /* ------------------------------------------------------------------------
9c4762a1bSJed Brown 
10c4762a1bSJed Brown    This program solves the PDE
11c4762a1bSJed Brown 
12c4762a1bSJed Brown                u * u_xx
13c4762a1bSJed Brown          u_t = ---------
14c4762a1bSJed Brown                2*(t+1)^2
15c4762a1bSJed Brown 
16c4762a1bSJed Brown     on the domain 0 <= x <= 1, with boundary conditions
17c4762a1bSJed Brown          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
18c4762a1bSJed Brown     and initial condition
19c4762a1bSJed Brown          u(0,x) = 1 + x*x.
20c4762a1bSJed Brown 
21c4762a1bSJed Brown     The exact solution is:
22c4762a1bSJed Brown          u(t,x) = (1 + x*x) * (1 + t)
23c4762a1bSJed Brown 
24c4762a1bSJed Brown     Note that since the solution is linear in time and quadratic in x,
25c4762a1bSJed Brown     the finite difference scheme actually computes the "exact" solution.
26c4762a1bSJed Brown 
27c4762a1bSJed Brown     We use by default the backward Euler method.
28c4762a1bSJed Brown 
29c4762a1bSJed Brown   ------------------------------------------------------------------------- */
30c4762a1bSJed Brown 
31c4762a1bSJed Brown /*
32c4762a1bSJed Brown    Include "petscts.h" to use the PETSc timestepping routines. Note that
33c4762a1bSJed Brown    this file automatically includes "petscsys.h" and other lower-level
34c4762a1bSJed Brown    PETSc include files.
35c4762a1bSJed Brown 
36c4762a1bSJed Brown    Include the "petscdmda.h" to allow us to use the distributed array data
37c4762a1bSJed Brown    structures to manage the parallel grid.
38c4762a1bSJed Brown */
39c4762a1bSJed Brown #include <petscts.h>
40c4762a1bSJed Brown #include <petscdm.h>
41c4762a1bSJed Brown #include <petscdmda.h>
42c4762a1bSJed Brown #include <petscdraw.h>
43c4762a1bSJed Brown 
44c4762a1bSJed Brown /*
45c4762a1bSJed Brown    User-defined application context - contains data needed by the
46c4762a1bSJed Brown    application-provided callback routines.
47c4762a1bSJed Brown */
typedef struct {
  MPI_Comm  comm;      /* communicator shared by all callbacks (set to PETSC_COMM_WORLD in main) */
  DM        da;        /* distributed array managing the 1d parallel grid */
  Vec       localwork; /* local ghosted work vector, used as RHSFunction output scratch */
  Vec       u_local;   /* local ghosted approximate solution vector (scatter target) */
  Vec       solution;  /* global vector holding the exact solution, for error monitoring */
  PetscInt  m;         /* total number of grid points (runtime option -M) */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* PETSC_TRUE activates debugging printouts (runtime option -debug) */
} AppCtx;
58c4762a1bSJed Brown 
59c4762a1bSJed Brown /*
60c4762a1bSJed Brown    User-defined routines, provided below.
61c4762a1bSJed Brown */
62c4762a1bSJed Brown extern PetscErrorCode InitialConditions(Vec,AppCtx*);
63c4762a1bSJed Brown extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
64c4762a1bSJed Brown extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
65c4762a1bSJed Brown extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
66c4762a1bSJed Brown extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
67c4762a1bSJed Brown 
68c4762a1bSJed Brown int main(int argc,char **argv)
69c4762a1bSJed Brown {
70c4762a1bSJed Brown   AppCtx         appctx;                 /* user-defined application context */
71c4762a1bSJed Brown   TS             ts;                     /* timestepping context */
72c4762a1bSJed Brown   Mat            A;                      /* Jacobian matrix data structure */
73c4762a1bSJed Brown   Vec            u;                      /* approximate solution vector */
74c4762a1bSJed Brown   PetscInt       time_steps_max = 100;  /* default max timesteps */
75c4762a1bSJed Brown   PetscReal      dt;
76c4762a1bSJed Brown   PetscReal      time_total_max = 100.0; /* default max total time */
77c4762a1bSJed Brown   PetscBool      mymonitor      = PETSC_FALSE;
78c4762a1bSJed Brown   PetscReal      bounds[]       = {1.0, 3.3};
79c4762a1bSJed Brown 
80c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
81c4762a1bSJed Brown      Initialize program and set problem parameters
82c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
83c4762a1bSJed Brown 
84*327415f7SBarry Smith   PetscFunctionBeginUser;
859566063dSJacob Faibussowitsch   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
869566063dSJacob Faibussowitsch   PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));
87c4762a1bSJed Brown 
88c4762a1bSJed Brown   appctx.comm = PETSC_COMM_WORLD;
89c4762a1bSJed Brown   appctx.m    = 60;
90c4762a1bSJed Brown 
919566063dSJacob Faibussowitsch   PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
929566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
939566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));
94c4762a1bSJed Brown 
95c4762a1bSJed Brown   appctx.h    = 1.0/(appctx.m-1.0);
96c4762a1bSJed Brown 
97c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
98c4762a1bSJed Brown      Create vector data structures
99c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
100c4762a1bSJed Brown 
101c4762a1bSJed Brown   /*
102c4762a1bSJed Brown      Create distributed array (DMDA) to manage parallel grid and vectors
103c4762a1bSJed Brown      and to set up the ghost point communication pattern.  There are M
104c4762a1bSJed Brown      total grid values spread equally among all the processors.
105c4762a1bSJed Brown   */
1069566063dSJacob Faibussowitsch   PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
1079566063dSJacob Faibussowitsch   PetscCall(DMSetFromOptions(appctx.da));
1089566063dSJacob Faibussowitsch   PetscCall(DMSetUp(appctx.da));
109c4762a1bSJed Brown 
110c4762a1bSJed Brown   /*
111c4762a1bSJed Brown      Extract global and local vectors from DMDA; we use these to store the
112c4762a1bSJed Brown      approximate solution.  Then duplicate these for remaining vectors that
113c4762a1bSJed Brown      have the same types.
114c4762a1bSJed Brown   */
1159566063dSJacob Faibussowitsch   PetscCall(DMCreateGlobalVector(appctx.da,&u));
1169566063dSJacob Faibussowitsch   PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));
117c4762a1bSJed Brown 
118c4762a1bSJed Brown   /*
119c4762a1bSJed Brown      Create local work vector for use in evaluating right-hand-side function;
120c4762a1bSJed Brown      create global work vector for storing exact solution.
121c4762a1bSJed Brown   */
1229566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
1239566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(u,&appctx.solution));
124c4762a1bSJed Brown 
125c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126c4762a1bSJed Brown      Create timestepping solver context; set callback routine for
127c4762a1bSJed Brown      right-hand-side function evaluation.
128c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
129c4762a1bSJed Brown 
1309566063dSJacob Faibussowitsch   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
1319566063dSJacob Faibussowitsch   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
1329566063dSJacob Faibussowitsch   PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
133c4762a1bSJed Brown 
134c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135c4762a1bSJed Brown      Set optional user-defined monitoring routine
136c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
137c4762a1bSJed Brown 
138c4762a1bSJed Brown   if (mymonitor) {
1399566063dSJacob Faibussowitsch     PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
140c4762a1bSJed Brown   }
141c4762a1bSJed Brown 
142c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
143c4762a1bSJed Brown      For nonlinear problems, the user can provide a Jacobian evaluation
144c4762a1bSJed Brown      routine (or use a finite differencing approximation).
145c4762a1bSJed Brown 
146c4762a1bSJed Brown      Create matrix data structure; set Jacobian evaluation routine.
147c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
148c4762a1bSJed Brown 
1499566063dSJacob Faibussowitsch   PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
1509566063dSJacob Faibussowitsch   PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
1519566063dSJacob Faibussowitsch   PetscCall(MatSetFromOptions(A));
1529566063dSJacob Faibussowitsch   PetscCall(MatSetUp(A));
1539566063dSJacob Faibussowitsch   PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));
154c4762a1bSJed Brown 
155c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
156c4762a1bSJed Brown      Set solution vector and initial timestep
157c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
158c4762a1bSJed Brown 
159c4762a1bSJed Brown   dt   = appctx.h/2.0;
1609566063dSJacob Faibussowitsch   PetscCall(TSSetTimeStep(ts,dt));
161c4762a1bSJed Brown 
162c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
163c4762a1bSJed Brown      Customize timestepping solver:
164c4762a1bSJed Brown        - Set the solution method to be the Backward Euler method.
165c4762a1bSJed Brown        - Set timestepping duration info
166c4762a1bSJed Brown      Then set runtime options, which can override these defaults.
167c4762a1bSJed Brown      For example,
168c4762a1bSJed Brown           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
169c4762a1bSJed Brown      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
170c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
171c4762a1bSJed Brown 
1729566063dSJacob Faibussowitsch   PetscCall(TSSetType(ts,TSBEULER));
1739566063dSJacob Faibussowitsch   PetscCall(TSSetMaxSteps(ts,time_steps_max));
1749566063dSJacob Faibussowitsch   PetscCall(TSSetMaxTime(ts,time_total_max));
1759566063dSJacob Faibussowitsch   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
1769566063dSJacob Faibussowitsch   PetscCall(TSSetFromOptions(ts));
177c4762a1bSJed Brown 
178c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179c4762a1bSJed Brown      Solve the problem
180c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
181c4762a1bSJed Brown 
182c4762a1bSJed Brown   /*
183c4762a1bSJed Brown      Evaluate initial conditions
184c4762a1bSJed Brown   */
1859566063dSJacob Faibussowitsch   PetscCall(InitialConditions(u,&appctx));
186c4762a1bSJed Brown 
187c4762a1bSJed Brown   /*
188c4762a1bSJed Brown      Run the timestepping solver
189c4762a1bSJed Brown   */
1909566063dSJacob Faibussowitsch   PetscCall(TSSolve(ts,u));
191c4762a1bSJed Brown 
192c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
193c4762a1bSJed Brown      Free work space.  All PETSc objects should be destroyed when they
194c4762a1bSJed Brown      are no longer needed.
195c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
196c4762a1bSJed Brown 
1979566063dSJacob Faibussowitsch   PetscCall(TSDestroy(&ts));
1989566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&u));
1999566063dSJacob Faibussowitsch   PetscCall(MatDestroy(&A));
2009566063dSJacob Faibussowitsch   PetscCall(DMDestroy(&appctx.da));
2019566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.localwork));
2029566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.solution));
2039566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.u_local));
204c4762a1bSJed Brown 
205c4762a1bSJed Brown   /*
206c4762a1bSJed Brown      Always call PetscFinalize() before exiting a program.  This routine
207c4762a1bSJed Brown        - finalizes the PETSc libraries as well as MPI
208c4762a1bSJed Brown        - provides summary and diagnostic information if certain runtime
209c4762a1bSJed Brown          options are chosen (e.g., -log_view).
210c4762a1bSJed Brown   */
2119566063dSJacob Faibussowitsch   PetscCall(PetscFinalize());
212b122ec5aSJacob Faibussowitsch   return 0;
213c4762a1bSJed Brown }
214c4762a1bSJed Brown /* --------------------------------------------------------------------- */
215c4762a1bSJed Brown /*
216c4762a1bSJed Brown    InitialConditions - Computes the solution at the initial time.
217c4762a1bSJed Brown 
218c4762a1bSJed Brown    Input Parameters:
219c4762a1bSJed Brown    u - uninitialized solution vector (global)
220c4762a1bSJed Brown    appctx - user-defined application context
221c4762a1bSJed Brown 
222c4762a1bSJed Brown    Output Parameter:
223c4762a1bSJed Brown    u - vector with solution at initial time (global)
224c4762a1bSJed Brown */
225c4762a1bSJed Brown PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
226c4762a1bSJed Brown {
227c4762a1bSJed Brown   PetscScalar    *u_localptr,h = appctx->h,x;
228c4762a1bSJed Brown   PetscInt       i,mybase,myend;
229c4762a1bSJed Brown 
230c4762a1bSJed Brown   /*
231c4762a1bSJed Brown      Determine starting point of each processor's range of
232c4762a1bSJed Brown      grid values.
233c4762a1bSJed Brown   */
2349566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(u,&mybase,&myend));
235c4762a1bSJed Brown 
236c4762a1bSJed Brown   /*
237c4762a1bSJed Brown     Get a pointer to vector data.
238c4762a1bSJed Brown     - For default PETSc vectors, VecGetArray() returns a pointer to
239c4762a1bSJed Brown       the data array.  Otherwise, the routine is implementation dependent.
240c4762a1bSJed Brown     - You MUST call VecRestoreArray() when you no longer need access to
241c4762a1bSJed Brown       the array.
242c4762a1bSJed Brown     - Note that the Fortran interface to VecGetArray() differs from the
243c4762a1bSJed Brown       C version.  See the users manual for details.
244c4762a1bSJed Brown   */
2459566063dSJacob Faibussowitsch   PetscCall(VecGetArray(u,&u_localptr));
246c4762a1bSJed Brown 
247c4762a1bSJed Brown   /*
248c4762a1bSJed Brown      We initialize the solution array by simply writing the solution
249c4762a1bSJed Brown      directly into the array locations.  Alternatively, we could use
250c4762a1bSJed Brown      VecSetValues() or VecSetValuesLocal().
251c4762a1bSJed Brown   */
252c4762a1bSJed Brown   for (i=mybase; i<myend; i++) {
253c4762a1bSJed Brown     x = h*(PetscReal)i; /* current location in global grid */
254c4762a1bSJed Brown     u_localptr[i-mybase] = 1.0 + x*x;
255c4762a1bSJed Brown   }
256c4762a1bSJed Brown 
257c4762a1bSJed Brown   /*
258c4762a1bSJed Brown      Restore vector
259c4762a1bSJed Brown   */
2609566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(u,&u_localptr));
261c4762a1bSJed Brown 
262c4762a1bSJed Brown   /*
263c4762a1bSJed Brown      Print debugging information if desired
264c4762a1bSJed Brown   */
265c4762a1bSJed Brown   if (appctx->debug) {
2669566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n"));
2679566063dSJacob Faibussowitsch     PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
268c4762a1bSJed Brown   }
269c4762a1bSJed Brown 
270c4762a1bSJed Brown   return 0;
271c4762a1bSJed Brown }
272c4762a1bSJed Brown /* --------------------------------------------------------------------- */
273c4762a1bSJed Brown /*
274c4762a1bSJed Brown    ExactSolution - Computes the exact solution at a given time.
275c4762a1bSJed Brown 
276c4762a1bSJed Brown    Input Parameters:
277c4762a1bSJed Brown    t - current time
278c4762a1bSJed Brown    solution - vector in which exact solution will be computed
279c4762a1bSJed Brown    appctx - user-defined application context
280c4762a1bSJed Brown 
281c4762a1bSJed Brown    Output Parameter:
282c4762a1bSJed Brown    solution - vector with the newly computed exact solution
283c4762a1bSJed Brown */
284c4762a1bSJed Brown PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
285c4762a1bSJed Brown {
286c4762a1bSJed Brown   PetscScalar    *s_localptr,h = appctx->h,x;
287c4762a1bSJed Brown   PetscInt       i,mybase,myend;
288c4762a1bSJed Brown 
289c4762a1bSJed Brown   /*
290c4762a1bSJed Brown      Determine starting and ending points of each processor's
291c4762a1bSJed Brown      range of grid values
292c4762a1bSJed Brown   */
2939566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));
294c4762a1bSJed Brown 
295c4762a1bSJed Brown   /*
296c4762a1bSJed Brown      Get a pointer to vector data.
297c4762a1bSJed Brown   */
2989566063dSJacob Faibussowitsch   PetscCall(VecGetArray(solution,&s_localptr));
299c4762a1bSJed Brown 
300c4762a1bSJed Brown   /*
301c4762a1bSJed Brown      Simply write the solution directly into the array locations.
302c4762a1bSJed Brown      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
303c4762a1bSJed Brown   */
304c4762a1bSJed Brown   for (i=mybase; i<myend; i++) {
305c4762a1bSJed Brown     x = h*(PetscReal)i;
306c4762a1bSJed Brown     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
307c4762a1bSJed Brown   }
308c4762a1bSJed Brown 
309c4762a1bSJed Brown   /*
310c4762a1bSJed Brown      Restore vector
311c4762a1bSJed Brown   */
3129566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(solution,&s_localptr));
313c4762a1bSJed Brown   return 0;
314c4762a1bSJed Brown }
315c4762a1bSJed Brown /* --------------------------------------------------------------------- */
316c4762a1bSJed Brown /*
317c4762a1bSJed Brown    Monitor - User-provided routine to monitor the solution computed at
318c4762a1bSJed Brown    each timestep.  This example plots the solution and computes the
319c4762a1bSJed Brown    error in two different norms.
320c4762a1bSJed Brown 
321c4762a1bSJed Brown    Input Parameters:
322c4762a1bSJed Brown    ts     - the timestep context
323c4762a1bSJed Brown    step   - the count of the current step (with 0 meaning the
324c4762a1bSJed Brown             initial condition)
325c4762a1bSJed Brown    time   - the current time
326c4762a1bSJed Brown    u      - the solution at this timestep
327c4762a1bSJed Brown    ctx    - the user-provided context for this monitoring routine.
328c4762a1bSJed Brown             In this case we use the application context which contains
329c4762a1bSJed Brown             information about the problem size, workspace and the exact
330c4762a1bSJed Brown             solution.
331c4762a1bSJed Brown */
332c4762a1bSJed Brown PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
333c4762a1bSJed Brown {
334c4762a1bSJed Brown   AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
335c4762a1bSJed Brown   PetscReal      en2,en2s,enmax;
336c4762a1bSJed Brown   PetscDraw      draw;
337c4762a1bSJed Brown 
338c4762a1bSJed Brown   /*
339e1dfdf8eSBarry Smith      We use the default X Windows viewer
340c4762a1bSJed Brown              PETSC_VIEWER_DRAW_(appctx->comm)
341c4762a1bSJed Brown      that is associated with the current communicator. This saves
342c4762a1bSJed Brown      the effort of calling PetscViewerDrawOpen() to create the window.
343c4762a1bSJed Brown      Note that if we wished to plot several items in separate windows we
344c4762a1bSJed Brown      would create each viewer with PetscViewerDrawOpen() and store them in
345c4762a1bSJed Brown      the application context, appctx.
346c4762a1bSJed Brown 
347c4762a1bSJed Brown      PetscReal buffering makes graphics look better.
348c4762a1bSJed Brown   */
3499566063dSJacob Faibussowitsch   PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
3509566063dSJacob Faibussowitsch   PetscCall(PetscDrawSetDoubleBuffer(draw));
3519566063dSJacob Faibussowitsch   PetscCall(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));
352c4762a1bSJed Brown 
353c4762a1bSJed Brown   /*
354c4762a1bSJed Brown      Compute the exact solution at this timestep
355c4762a1bSJed Brown   */
3569566063dSJacob Faibussowitsch   PetscCall(ExactSolution(time,appctx->solution,appctx));
357c4762a1bSJed Brown 
358c4762a1bSJed Brown   /*
359c4762a1bSJed Brown      Print debugging information if desired
360c4762a1bSJed Brown   */
361c4762a1bSJed Brown   if (appctx->debug) {
3629566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
3639566063dSJacob Faibussowitsch     PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
3649566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
3659566063dSJacob Faibussowitsch     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
366c4762a1bSJed Brown   }
367c4762a1bSJed Brown 
368c4762a1bSJed Brown   /*
369c4762a1bSJed Brown      Compute the 2-norm and max-norm of the error
370c4762a1bSJed Brown   */
3719566063dSJacob Faibussowitsch   PetscCall(VecAXPY(appctx->solution,-1.0,u));
3729566063dSJacob Faibussowitsch   PetscCall(VecNorm(appctx->solution,NORM_2,&en2));
373c4762a1bSJed Brown   en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
3749566063dSJacob Faibussowitsch   PetscCall(VecNorm(appctx->solution,NORM_MAX,&enmax));
375c4762a1bSJed Brown 
376c4762a1bSJed Brown   /*
377c4762a1bSJed Brown      PetscPrintf() causes only the first processor in this
378c4762a1bSJed Brown      communicator to print the timestep information.
379c4762a1bSJed Brown   */
38063a3b9bcSJacob Faibussowitsch   PetscCall(PetscPrintf(appctx->comm,"Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));
381c4762a1bSJed Brown 
382c4762a1bSJed Brown   /*
383c4762a1bSJed Brown      Print debugging information if desired
384c4762a1bSJed Brown   */
385c4762a1bSJed Brown   if (appctx->debug) {
3869566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
3879566063dSJacob Faibussowitsch     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
388c4762a1bSJed Brown   }
389c4762a1bSJed Brown   return 0;
390c4762a1bSJed Brown }
391c4762a1bSJed Brown /* --------------------------------------------------------------------- */
392c4762a1bSJed Brown /*
393c4762a1bSJed Brown    RHSFunction - User-provided routine that evalues the right-hand-side
394c4762a1bSJed Brown    function of the ODE.  This routine is set in the main program by
395c4762a1bSJed Brown    calling TSSetRHSFunction().  We compute:
396c4762a1bSJed Brown           global_out = F(global_in)
397c4762a1bSJed Brown 
398c4762a1bSJed Brown    Input Parameters:
399c4762a1bSJed Brown    ts         - timesteping context
400c4762a1bSJed Brown    t          - current time
401c4762a1bSJed Brown    global_in  - vector containing the current iterate
402c4762a1bSJed Brown    ctx        - (optional) user-provided context for function evaluation.
403c4762a1bSJed Brown                 In this case we use the appctx defined above.
404c4762a1bSJed Brown 
405c4762a1bSJed Brown    Output Parameter:
406c4762a1bSJed Brown    global_out - vector containing the newly evaluated function
407c4762a1bSJed Brown */
408c4762a1bSJed Brown PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
409c4762a1bSJed Brown {
410c4762a1bSJed Brown   AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
411c4762a1bSJed Brown   DM                da        = appctx->da;        /* distributed array */
412c4762a1bSJed Brown   Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
413c4762a1bSJed Brown   Vec               localwork = appctx->localwork; /* local ghosted work vector */
414c4762a1bSJed Brown   PetscInt          i,localsize;
415c4762a1bSJed Brown   PetscMPIInt       rank,size;
416c4762a1bSJed Brown   PetscScalar       *copyptr,sc;
417c4762a1bSJed Brown   const PetscScalar *localptr;
418c4762a1bSJed Brown 
419c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
420c4762a1bSJed Brown      Get ready for local function computations
421c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
422c4762a1bSJed Brown   /*
423c4762a1bSJed Brown      Scatter ghost points to local vector, using the 2-step process
424c4762a1bSJed Brown         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
425c4762a1bSJed Brown      By placing code between these two statements, computations can be
426c4762a1bSJed Brown      done while messages are in transition.
427c4762a1bSJed Brown   */
4289566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
4299566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));
430c4762a1bSJed Brown 
431c4762a1bSJed Brown   /*
432c4762a1bSJed Brown       Access directly the values in our local INPUT work array
433c4762a1bSJed Brown   */
4349566063dSJacob Faibussowitsch   PetscCall(VecGetArrayRead(local_in,&localptr));
435c4762a1bSJed Brown 
436c4762a1bSJed Brown   /*
437c4762a1bSJed Brown       Access directly the values in our local OUTPUT work array
438c4762a1bSJed Brown   */
4399566063dSJacob Faibussowitsch   PetscCall(VecGetArray(localwork,&copyptr));
440c4762a1bSJed Brown 
441c4762a1bSJed Brown   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
442c4762a1bSJed Brown 
443c4762a1bSJed Brown   /*
444c4762a1bSJed Brown       Evaluate our function on the nodes owned by this processor
445c4762a1bSJed Brown   */
4469566063dSJacob Faibussowitsch   PetscCall(VecGetLocalSize(local_in,&localsize));
447c4762a1bSJed Brown 
448c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
449c4762a1bSJed Brown      Compute entries for the locally owned part
450c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
451c4762a1bSJed Brown 
452c4762a1bSJed Brown   /*
453c4762a1bSJed Brown      Handle boundary conditions: This is done by using the boundary condition
454c4762a1bSJed Brown         u(t,boundary) = g(t,boundary)
455c4762a1bSJed Brown      for some function g. Now take the derivative with respect to t to obtain
456c4762a1bSJed Brown         u_{t}(t,boundary) = g_{t}(t,boundary)
457c4762a1bSJed Brown 
458c4762a1bSJed Brown      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
459c4762a1bSJed Brown              and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
460c4762a1bSJed Brown   */
4619566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
4629566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
463dd400576SPatrick Sanan   if (rank == 0)          copyptr[0]           = 1.0;
464c4762a1bSJed Brown   if (rank == size-1) copyptr[localsize-1] = 2.0;
465c4762a1bSJed Brown 
466c4762a1bSJed Brown   /*
467c4762a1bSJed Brown      Handle the interior nodes where the PDE is replace by finite
468c4762a1bSJed Brown      difference operators.
469c4762a1bSJed Brown   */
470c4762a1bSJed Brown   for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
471c4762a1bSJed Brown 
472c4762a1bSJed Brown   /*
473c4762a1bSJed Brown      Restore vectors
474c4762a1bSJed Brown   */
4759566063dSJacob Faibussowitsch   PetscCall(VecRestoreArrayRead(local_in,&localptr));
4769566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(localwork,&copyptr));
477c4762a1bSJed Brown 
478c4762a1bSJed Brown   /*
479c4762a1bSJed Brown      Insert values from the local OUTPUT vector into the global
480c4762a1bSJed Brown      output vector
481c4762a1bSJed Brown   */
4829566063dSJacob Faibussowitsch   PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
4839566063dSJacob Faibussowitsch   PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));
484c4762a1bSJed Brown 
485c4762a1bSJed Brown   /* Print debugging information if desired */
486c4762a1bSJed Brown   if (appctx->debug) {
4879566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
4889566063dSJacob Faibussowitsch     PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
489c4762a1bSJed Brown   }
490c4762a1bSJed Brown 
491c4762a1bSJed Brown   return 0;
492c4762a1bSJed Brown }
493c4762a1bSJed Brown /* --------------------------------------------------------------------- */
494c4762a1bSJed Brown /*
495c4762a1bSJed Brown    RHSJacobian - User-provided routine to compute the Jacobian of
496c4762a1bSJed Brown    the nonlinear right-hand-side function of the ODE.
497c4762a1bSJed Brown 
498c4762a1bSJed Brown    Input Parameters:
499c4762a1bSJed Brown    ts - the TS context
500c4762a1bSJed Brown    t - current time
501c4762a1bSJed Brown    global_in - global input vector
502c4762a1bSJed Brown    dummy - optional user-defined context, as set by TSetRHSJacobian()
503c4762a1bSJed Brown 
504c4762a1bSJed Brown    Output Parameters:
505c4762a1bSJed Brown    AA - Jacobian matrix
506c4762a1bSJed Brown    BB - optionally different preconditioning matrix
507c4762a1bSJed Brown    str - flag indicating matrix structure
508c4762a1bSJed Brown 
509c4762a1bSJed Brown   Notes:
510c4762a1bSJed Brown   RHSJacobian computes entries for the locally owned part of the Jacobian.
511c4762a1bSJed Brown    - Currently, all PETSc parallel matrix formats are partitioned by
512c4762a1bSJed Brown      contiguous chunks of rows across the processors.
513c4762a1bSJed Brown    - Each processor needs to insert only elements that it owns
514c4762a1bSJed Brown      locally (but any non-local elements will be sent to the
515c4762a1bSJed Brown      appropriate processor during matrix assembly).
516c4762a1bSJed Brown    - Always specify global row and columns of matrix entries when
517c4762a1bSJed Brown      using MatSetValues().
518c4762a1bSJed Brown    - Here, we set all entries for a particular row at once.
519c4762a1bSJed Brown    - Note that MatSetValues() uses 0-based row and column numbers
520c4762a1bSJed Brown      in Fortran as well as in C.
521c4762a1bSJed Brown */
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;                      /* one 3-entry matrix row; PDE/FD scaling factor */
  const PetscScalar *localptr;                    /* read-only array view of the ghosted local vector */
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data.  The diagonal entry
     of each boundary row is explicitly zeroed: the Dirichlet boundary
     values u(t,0) and u(t,1) are imposed directly, so these rows do not
     couple to the interior unknowns.  After handling a boundary row we
     shrink [mstart,mend) so the interior loop below skips it.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.

     sc combines the 1/h^2 of the centered second difference with the
     1/(2*(1+t)^2) factor of the PDE  u_t = u*u_xx / (2*(t+1)^2), so the
     discrete RHS is  f_i = sc * u_i * (u_{i+1} - 2*u_i + u_{i-1})  and
     its row-i Jacobian entries are:
        d f_i / d u_{i-1} = sc * u_i
        d f_i / d u_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
        d f_i / d u_{i+1} = sc * u_i
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    /*
       is maps global row i into the ghosted local array: on interior
       processes the local array carries one ghost point on the left
       (hence the +1); on the first process there is no left ghost, but
       mstart was advanced past the boundary row above, which compensates.
    */
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
     When the preconditioner matrix BB differs from the operator AA,
     AA must be assembled separately as well.
  */
  PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}
623c4762a1bSJed Brown 
624c4762a1bSJed Brown /*TEST
625c4762a1bSJed Brown 
626c4762a1bSJed Brown     test:
627c4762a1bSJed Brown       args: -nox -ts_dt 10 -mymonitor
628c4762a1bSJed Brown       nsize: 2
629c4762a1bSJed Brown       requires: !single
630c4762a1bSJed Brown 
631c4762a1bSJed Brown     test:
632c4762a1bSJed Brown       suffix: tut_1
633c4762a1bSJed Brown       nsize: 1
634c4762a1bSJed Brown       args: -ts_max_steps 10 -ts_monitor
635c4762a1bSJed Brown 
636c4762a1bSJed Brown     test:
637c4762a1bSJed Brown       suffix: tut_2
638c4762a1bSJed Brown       nsize: 4
639c4762a1bSJed Brown       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
640c4762a1bSJed Brown 
641c4762a1bSJed Brown     test:
642c4762a1bSJed Brown       suffix: tut_3
643c4762a1bSJed Brown       nsize: 4
6442e16c0ceSBarry Smith       args: -ts_max_steps 10 -ts_monitor -M 128
645c4762a1bSJed Brown 
646c4762a1bSJed Brown TEST*/
647