xref: /petsc/src/ts/tutorials/ex2.c (revision 3ba1676111f5c958fe6c2729b46ca4d523958bb3) !
1c4762a1bSJed Brown 
2c4762a1bSJed Brown static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
3c4762a1bSJed Brown timestepping.  Runtime options include:\n\
4c4762a1bSJed Brown   -M <xg>, where <xg> = number of grid points\n\
5c4762a1bSJed Brown   -debug : Activate debugging printouts\n\
6c4762a1bSJed Brown   -nox   : Deactivate x-window graphics\n\n";
7c4762a1bSJed Brown 
8c4762a1bSJed Brown /* ------------------------------------------------------------------------
9c4762a1bSJed Brown 
10c4762a1bSJed Brown    This program solves the PDE
11c4762a1bSJed Brown 
12c4762a1bSJed Brown                u * u_xx
13c4762a1bSJed Brown          u_t = ---------
14c4762a1bSJed Brown                2*(t+1)^2
15c4762a1bSJed Brown 
16c4762a1bSJed Brown     on the domain 0 <= x <= 1, with boundary conditions
17c4762a1bSJed Brown          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
18c4762a1bSJed Brown     and initial condition
19c4762a1bSJed Brown          u(0,x) = 1 + x*x.
20c4762a1bSJed Brown 
21c4762a1bSJed Brown     The exact solution is:
22c4762a1bSJed Brown          u(t,x) = (1 + x*x) * (1 + t)
23c4762a1bSJed Brown 
24c4762a1bSJed Brown     Note that since the solution is linear in time and quadratic in x,
25c4762a1bSJed Brown     the finite difference scheme actually computes the "exact" solution.
26c4762a1bSJed Brown 
27c4762a1bSJed Brown     We use by default the backward Euler method.
28c4762a1bSJed Brown 
29c4762a1bSJed Brown   ------------------------------------------------------------------------- */
30c4762a1bSJed Brown 
31c4762a1bSJed Brown /*
32c4762a1bSJed Brown    Include "petscts.h" to use the PETSc timestepping routines. Note that
33c4762a1bSJed Brown    this file automatically includes "petscsys.h" and other lower-level
34c4762a1bSJed Brown    PETSc include files.
35c4762a1bSJed Brown 
36c4762a1bSJed Brown    Include the "petscdmda.h" to allow us to use the distributed array data
37c4762a1bSJed Brown    structures to manage the parallel grid.
38c4762a1bSJed Brown */
39c4762a1bSJed Brown #include <petscts.h>
40c4762a1bSJed Brown #include <petscdm.h>
41c4762a1bSJed Brown #include <petscdmda.h>
42c4762a1bSJed Brown #include <petscdraw.h>
43c4762a1bSJed Brown 
44c4762a1bSJed Brown /*
45c4762a1bSJed Brown    User-defined application context - contains data needed by the
46c4762a1bSJed Brown    application-provided callback routines.
47c4762a1bSJed Brown */
typedef struct {
  MPI_Comm  comm;      /* communicator shared by all callbacks (set to PETSC_COMM_WORLD in main) */
  DM        da;        /* distributed array managing the 1-D parallel grid and ghost exchange */
  Vec       localwork; /* local ghosted work vector; holds the RHS values before scatter to global */
  Vec       u_local;   /* local ghosted copy of the approximate solution (input to RHS/Jacobian) */
  Vec       solution;  /* global vector holding the exact solution (and later the error) */
  PetscInt  m;         /* total number of grid points across all processes */
  PetscReal h;         /* uniform mesh width: h = 1/(m-1) on the domain [0,1] */
  PetscBool debug;     /* PETSC_TRUE activates the debugging printouts in the callbacks */
} AppCtx;
58c4762a1bSJed Brown 
59c4762a1bSJed Brown /*
60c4762a1bSJed Brown    User-defined routines, provided below.
61c4762a1bSJed Brown */
62c4762a1bSJed Brown extern PetscErrorCode InitialConditions(Vec, AppCtx *);
63c4762a1bSJed Brown extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
64c4762a1bSJed Brown extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
65c4762a1bSJed Brown extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
66c4762a1bSJed Brown extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
67c4762a1bSJed Brown 
68d71ae5a4SJacob Faibussowitsch int main(int argc, char **argv)
69d71ae5a4SJacob Faibussowitsch {
70c4762a1bSJed Brown   AppCtx    appctx;               /* user-defined application context */
71c4762a1bSJed Brown   TS        ts;                   /* timestepping context */
72c4762a1bSJed Brown   Mat       A;                    /* Jacobian matrix data structure */
73c4762a1bSJed Brown   Vec       u;                    /* approximate solution vector */
74c4762a1bSJed Brown   PetscInt  time_steps_max = 100; /* default max timesteps */
75c4762a1bSJed Brown   PetscReal dt;
76c4762a1bSJed Brown   PetscReal time_total_max = 100.0; /* default max total time */
77c4762a1bSJed Brown   PetscBool mymonitor      = PETSC_FALSE;
78c4762a1bSJed Brown   PetscReal bounds[]       = {1.0, 3.3};
79c4762a1bSJed Brown 
80c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
81c4762a1bSJed Brown      Initialize program and set problem parameters
82c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
83c4762a1bSJed Brown 
84327415f7SBarry Smith   PetscFunctionBeginUser;
859566063dSJacob Faibussowitsch   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
869566063dSJacob Faibussowitsch   PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));
87c4762a1bSJed Brown 
88c4762a1bSJed Brown   appctx.comm = PETSC_COMM_WORLD;
89c4762a1bSJed Brown   appctx.m    = 60;
90c4762a1bSJed Brown 
919566063dSJacob Faibussowitsch   PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
929566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
939566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
94c4762a1bSJed Brown 
95c4762a1bSJed Brown   appctx.h = 1.0 / (appctx.m - 1.0);
96c4762a1bSJed Brown 
97c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
98c4762a1bSJed Brown      Create vector data structures
99c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
100c4762a1bSJed Brown 
101c4762a1bSJed Brown   /*
102c4762a1bSJed Brown      Create distributed array (DMDA) to manage parallel grid and vectors
103c4762a1bSJed Brown      and to set up the ghost point communication pattern.  There are M
104c4762a1bSJed Brown      total grid values spread equally among all the processors.
105c4762a1bSJed Brown   */
1069566063dSJacob Faibussowitsch   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
1079566063dSJacob Faibussowitsch   PetscCall(DMSetFromOptions(appctx.da));
1089566063dSJacob Faibussowitsch   PetscCall(DMSetUp(appctx.da));
109c4762a1bSJed Brown 
110c4762a1bSJed Brown   /*
111c4762a1bSJed Brown      Extract global and local vectors from DMDA; we use these to store the
112c4762a1bSJed Brown      approximate solution.  Then duplicate these for remaining vectors that
113c4762a1bSJed Brown      have the same types.
114c4762a1bSJed Brown   */
1159566063dSJacob Faibussowitsch   PetscCall(DMCreateGlobalVector(appctx.da, &u));
1169566063dSJacob Faibussowitsch   PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));
117c4762a1bSJed Brown 
118c4762a1bSJed Brown   /*
119c4762a1bSJed Brown      Create local work vector for use in evaluating right-hand-side function;
120c4762a1bSJed Brown      create global work vector for storing exact solution.
121c4762a1bSJed Brown   */
1229566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
1239566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(u, &appctx.solution));
124c4762a1bSJed Brown 
125c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126c4762a1bSJed Brown      Create timestepping solver context; set callback routine for
127c4762a1bSJed Brown      right-hand-side function evaluation.
128c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
129c4762a1bSJed Brown 
1309566063dSJacob Faibussowitsch   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
1319566063dSJacob Faibussowitsch   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
1329566063dSJacob Faibussowitsch   PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
133c4762a1bSJed Brown 
134c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135c4762a1bSJed Brown      Set optional user-defined monitoring routine
136c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
137c4762a1bSJed Brown 
13848a46eb9SPierre Jolivet   if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));
139c4762a1bSJed Brown 
140c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
141c4762a1bSJed Brown      For nonlinear problems, the user can provide a Jacobian evaluation
142c4762a1bSJed Brown      routine (or use a finite differencing approximation).
143c4762a1bSJed Brown 
144c4762a1bSJed Brown      Create matrix data structure; set Jacobian evaluation routine.
145c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
146c4762a1bSJed Brown 
1479566063dSJacob Faibussowitsch   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
1489566063dSJacob Faibussowitsch   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
1499566063dSJacob Faibussowitsch   PetscCall(MatSetFromOptions(A));
1509566063dSJacob Faibussowitsch   PetscCall(MatSetUp(A));
1519566063dSJacob Faibussowitsch   PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
152c4762a1bSJed Brown 
153c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
154c4762a1bSJed Brown      Set solution vector and initial timestep
155c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
156c4762a1bSJed Brown 
157c4762a1bSJed Brown   dt = appctx.h / 2.0;
1589566063dSJacob Faibussowitsch   PetscCall(TSSetTimeStep(ts, dt));
159c4762a1bSJed Brown 
160c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
161c4762a1bSJed Brown      Customize timestepping solver:
162c4762a1bSJed Brown        - Set the solution method to be the Backward Euler method.
163c4762a1bSJed Brown        - Set timestepping duration info
164c4762a1bSJed Brown      Then set runtime options, which can override these defaults.
165c4762a1bSJed Brown      For example,
166c4762a1bSJed Brown           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
167c4762a1bSJed Brown      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
168c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
169c4762a1bSJed Brown 
1709566063dSJacob Faibussowitsch   PetscCall(TSSetType(ts, TSBEULER));
1719566063dSJacob Faibussowitsch   PetscCall(TSSetMaxSteps(ts, time_steps_max));
1729566063dSJacob Faibussowitsch   PetscCall(TSSetMaxTime(ts, time_total_max));
1739566063dSJacob Faibussowitsch   PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
1749566063dSJacob Faibussowitsch   PetscCall(TSSetFromOptions(ts));
175c4762a1bSJed Brown 
176c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
177c4762a1bSJed Brown      Solve the problem
178c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
179c4762a1bSJed Brown 
180c4762a1bSJed Brown   /*
181c4762a1bSJed Brown      Evaluate initial conditions
182c4762a1bSJed Brown   */
1839566063dSJacob Faibussowitsch   PetscCall(InitialConditions(u, &appctx));
184c4762a1bSJed Brown 
185c4762a1bSJed Brown   /*
186c4762a1bSJed Brown      Run the timestepping solver
187c4762a1bSJed Brown   */
1889566063dSJacob Faibussowitsch   PetscCall(TSSolve(ts, u));
189c4762a1bSJed Brown 
190c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
191c4762a1bSJed Brown      Free work space.  All PETSc objects should be destroyed when they
192c4762a1bSJed Brown      are no longer needed.
193c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
194c4762a1bSJed Brown 
1959566063dSJacob Faibussowitsch   PetscCall(TSDestroy(&ts));
1969566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&u));
1979566063dSJacob Faibussowitsch   PetscCall(MatDestroy(&A));
1989566063dSJacob Faibussowitsch   PetscCall(DMDestroy(&appctx.da));
1999566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.localwork));
2009566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.solution));
2019566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.u_local));
202c4762a1bSJed Brown 
203c4762a1bSJed Brown   /*
204c4762a1bSJed Brown      Always call PetscFinalize() before exiting a program.  This routine
205c4762a1bSJed Brown        - finalizes the PETSc libraries as well as MPI
206c4762a1bSJed Brown        - provides summary and diagnostic information if certain runtime
207c4762a1bSJed Brown          options are chosen (e.g., -log_view).
208c4762a1bSJed Brown   */
2099566063dSJacob Faibussowitsch   PetscCall(PetscFinalize());
210b122ec5aSJacob Faibussowitsch   return 0;
211c4762a1bSJed Brown }
212c4762a1bSJed Brown /* --------------------------------------------------------------------- */
213c4762a1bSJed Brown /*
214c4762a1bSJed Brown    InitialConditions - Computes the solution at the initial time.
215c4762a1bSJed Brown 
216c4762a1bSJed Brown    Input Parameters:
217c4762a1bSJed Brown    u - uninitialized solution vector (global)
218c4762a1bSJed Brown    appctx - user-defined application context
219c4762a1bSJed Brown 
220c4762a1bSJed Brown    Output Parameter:
221c4762a1bSJed Brown    u - vector with solution at initial time (global)
222c4762a1bSJed Brown */
223d71ae5a4SJacob Faibussowitsch PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
224d71ae5a4SJacob Faibussowitsch {
225c4762a1bSJed Brown   PetscScalar *u_localptr, h = appctx->h, x;
226c4762a1bSJed Brown   PetscInt     i, mybase, myend;
227c4762a1bSJed Brown 
228*3ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
229c4762a1bSJed Brown   /*
230c4762a1bSJed Brown      Determine starting point of each processor's range of
231c4762a1bSJed Brown      grid values.
232c4762a1bSJed Brown   */
2339566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
234c4762a1bSJed Brown 
235c4762a1bSJed Brown   /*
236c4762a1bSJed Brown     Get a pointer to vector data.
237c4762a1bSJed Brown     - For default PETSc vectors, VecGetArray() returns a pointer to
238c4762a1bSJed Brown       the data array.  Otherwise, the routine is implementation dependent.
239c4762a1bSJed Brown     - You MUST call VecRestoreArray() when you no longer need access to
240c4762a1bSJed Brown       the array.
241c4762a1bSJed Brown     - Note that the Fortran interface to VecGetArray() differs from the
242c4762a1bSJed Brown       C version.  See the users manual for details.
243c4762a1bSJed Brown   */
2449566063dSJacob Faibussowitsch   PetscCall(VecGetArray(u, &u_localptr));
245c4762a1bSJed Brown 
246c4762a1bSJed Brown   /*
247c4762a1bSJed Brown      We initialize the solution array by simply writing the solution
248c4762a1bSJed Brown      directly into the array locations.  Alternatively, we could use
249c4762a1bSJed Brown      VecSetValues() or VecSetValuesLocal().
250c4762a1bSJed Brown   */
251c4762a1bSJed Brown   for (i = mybase; i < myend; i++) {
252c4762a1bSJed Brown     x                      = h * (PetscReal)i; /* current location in global grid */
253c4762a1bSJed Brown     u_localptr[i - mybase] = 1.0 + x * x;
254c4762a1bSJed Brown   }
255c4762a1bSJed Brown 
256c4762a1bSJed Brown   /*
257c4762a1bSJed Brown      Restore vector
258c4762a1bSJed Brown   */
2599566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(u, &u_localptr));
260c4762a1bSJed Brown 
261c4762a1bSJed Brown   /*
262c4762a1bSJed Brown      Print debugging information if desired
263c4762a1bSJed Brown   */
264c4762a1bSJed Brown   if (appctx->debug) {
2659566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
2669566063dSJacob Faibussowitsch     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
267c4762a1bSJed Brown   }
268c4762a1bSJed Brown 
269*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
270c4762a1bSJed Brown }
271c4762a1bSJed Brown /* --------------------------------------------------------------------- */
272c4762a1bSJed Brown /*
273c4762a1bSJed Brown    ExactSolution - Computes the exact solution at a given time.
274c4762a1bSJed Brown 
275c4762a1bSJed Brown    Input Parameters:
276c4762a1bSJed Brown    t - current time
277c4762a1bSJed Brown    solution - vector in which exact solution will be computed
278c4762a1bSJed Brown    appctx - user-defined application context
279c4762a1bSJed Brown 
280c4762a1bSJed Brown    Output Parameter:
281c4762a1bSJed Brown    solution - vector with the newly computed exact solution
282c4762a1bSJed Brown */
283d71ae5a4SJacob Faibussowitsch PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
284d71ae5a4SJacob Faibussowitsch {
285c4762a1bSJed Brown   PetscScalar *s_localptr, h = appctx->h, x;
286c4762a1bSJed Brown   PetscInt     i, mybase, myend;
287c4762a1bSJed Brown 
288*3ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
289c4762a1bSJed Brown   /*
290c4762a1bSJed Brown      Determine starting and ending points of each processor's
291c4762a1bSJed Brown      range of grid values
292c4762a1bSJed Brown   */
2939566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
294c4762a1bSJed Brown 
295c4762a1bSJed Brown   /*
296c4762a1bSJed Brown      Get a pointer to vector data.
297c4762a1bSJed Brown   */
2989566063dSJacob Faibussowitsch   PetscCall(VecGetArray(solution, &s_localptr));
299c4762a1bSJed Brown 
300c4762a1bSJed Brown   /*
301c4762a1bSJed Brown      Simply write the solution directly into the array locations.
302c4762a1bSJed Brown      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
303c4762a1bSJed Brown   */
304c4762a1bSJed Brown   for (i = mybase; i < myend; i++) {
305c4762a1bSJed Brown     x                      = h * (PetscReal)i;
306c4762a1bSJed Brown     s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
307c4762a1bSJed Brown   }
308c4762a1bSJed Brown 
309c4762a1bSJed Brown   /*
310c4762a1bSJed Brown      Restore vector
311c4762a1bSJed Brown   */
3129566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(solution, &s_localptr));
313*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
314c4762a1bSJed Brown }
315c4762a1bSJed Brown /* --------------------------------------------------------------------- */
316c4762a1bSJed Brown /*
317c4762a1bSJed Brown    Monitor - User-provided routine to monitor the solution computed at
318c4762a1bSJed Brown    each timestep.  This example plots the solution and computes the
319c4762a1bSJed Brown    error in two different norms.
320c4762a1bSJed Brown 
321c4762a1bSJed Brown    Input Parameters:
322c4762a1bSJed Brown    ts     - the timestep context
323c4762a1bSJed Brown    step   - the count of the current step (with 0 meaning the
324c4762a1bSJed Brown             initial condition)
325c4762a1bSJed Brown    time   - the current time
326c4762a1bSJed Brown    u      - the solution at this timestep
327c4762a1bSJed Brown    ctx    - the user-provided context for this monitoring routine.
328c4762a1bSJed Brown             In this case we use the application context which contains
329c4762a1bSJed Brown             information about the problem size, workspace and the exact
330c4762a1bSJed Brown             solution.
331c4762a1bSJed Brown */
332d71ae5a4SJacob Faibussowitsch PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
333d71ae5a4SJacob Faibussowitsch {
334c4762a1bSJed Brown   AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
335c4762a1bSJed Brown   PetscReal en2, en2s, enmax;
336c4762a1bSJed Brown   PetscDraw draw;
337c4762a1bSJed Brown 
338*3ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
339c4762a1bSJed Brown   /*
340e1dfdf8eSBarry Smith      We use the default X Windows viewer
341c4762a1bSJed Brown              PETSC_VIEWER_DRAW_(appctx->comm)
342c4762a1bSJed Brown      that is associated with the current communicator. This saves
343c4762a1bSJed Brown      the effort of calling PetscViewerDrawOpen() to create the window.
344c4762a1bSJed Brown      Note that if we wished to plot several items in separate windows we
345c4762a1bSJed Brown      would create each viewer with PetscViewerDrawOpen() and store them in
346c4762a1bSJed Brown      the application context, appctx.
347c4762a1bSJed Brown 
348c4762a1bSJed Brown      PetscReal buffering makes graphics look better.
349c4762a1bSJed Brown   */
3509566063dSJacob Faibussowitsch   PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
3519566063dSJacob Faibussowitsch   PetscCall(PetscDrawSetDoubleBuffer(draw));
3529566063dSJacob Faibussowitsch   PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));
353c4762a1bSJed Brown 
354c4762a1bSJed Brown   /*
355c4762a1bSJed Brown      Compute the exact solution at this timestep
356c4762a1bSJed Brown   */
3579566063dSJacob Faibussowitsch   PetscCall(ExactSolution(time, appctx->solution, appctx));
358c4762a1bSJed Brown 
359c4762a1bSJed Brown   /*
360c4762a1bSJed Brown      Print debugging information if desired
361c4762a1bSJed Brown   */
362c4762a1bSJed Brown   if (appctx->debug) {
3639566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
3649566063dSJacob Faibussowitsch     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
3659566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
3669566063dSJacob Faibussowitsch     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
367c4762a1bSJed Brown   }
368c4762a1bSJed Brown 
369c4762a1bSJed Brown   /*
370c4762a1bSJed Brown      Compute the 2-norm and max-norm of the error
371c4762a1bSJed Brown   */
3729566063dSJacob Faibussowitsch   PetscCall(VecAXPY(appctx->solution, -1.0, u));
3739566063dSJacob Faibussowitsch   PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
374c4762a1bSJed Brown   en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
3759566063dSJacob Faibussowitsch   PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));
376c4762a1bSJed Brown 
377c4762a1bSJed Brown   /*
378c4762a1bSJed Brown      PetscPrintf() causes only the first processor in this
379c4762a1bSJed Brown      communicator to print the timestep information.
380c4762a1bSJed Brown   */
38163a3b9bcSJacob Faibussowitsch   PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));
382c4762a1bSJed Brown 
383c4762a1bSJed Brown   /*
384c4762a1bSJed Brown      Print debugging information if desired
385c4762a1bSJed Brown   */
386c4762a1bSJed Brown   if (appctx->debug) {
3879566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
3889566063dSJacob Faibussowitsch     PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
389c4762a1bSJed Brown   }
390*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
391c4762a1bSJed Brown }
392c4762a1bSJed Brown /* --------------------------------------------------------------------- */
393c4762a1bSJed Brown /*
394c4762a1bSJed Brown    RHSFunction - User-provided routine that evaluates the right-hand-side
395c4762a1bSJed Brown    function of the ODE.  This routine is set in the main program by
396c4762a1bSJed Brown    calling TSSetRHSFunction().  We compute:
397c4762a1bSJed Brown           global_out = F(global_in)
398c4762a1bSJed Brown 
399c4762a1bSJed Brown    Input Parameters:
400c4762a1bSJed Brown    ts         - timestepping context
401c4762a1bSJed Brown    t          - current time
402c4762a1bSJed Brown    global_in  - vector containing the current iterate
403c4762a1bSJed Brown    ctx        - (optional) user-provided context for function evaluation.
404c4762a1bSJed Brown                 In this case we use the appctx defined above.
405c4762a1bSJed Brown 
406c4762a1bSJed Brown    Output Parameter:
407c4762a1bSJed Brown    global_out - vector containing the newly evaluated function
408c4762a1bSJed Brown */
409d71ae5a4SJacob Faibussowitsch PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
410d71ae5a4SJacob Faibussowitsch {
411c4762a1bSJed Brown   AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
412c4762a1bSJed Brown   DM                 da        = appctx->da;        /* distributed array */
413c4762a1bSJed Brown   Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
414c4762a1bSJed Brown   Vec                localwork = appctx->localwork; /* local ghosted work vector */
415c4762a1bSJed Brown   PetscInt           i, localsize;
416c4762a1bSJed Brown   PetscMPIInt        rank, size;
417c4762a1bSJed Brown   PetscScalar       *copyptr, sc;
418c4762a1bSJed Brown   const PetscScalar *localptr;
419c4762a1bSJed Brown 
420*3ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
421c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
422c4762a1bSJed Brown      Get ready for local function computations
423c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
424c4762a1bSJed Brown   /*
425c4762a1bSJed Brown      Scatter ghost points to local vector, using the 2-step process
426c4762a1bSJed Brown         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
427c4762a1bSJed Brown      By placing code between these two statements, computations can be
428c4762a1bSJed Brown      done while messages are in transition.
429c4762a1bSJed Brown   */
4309566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
4319566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
432c4762a1bSJed Brown 
433c4762a1bSJed Brown   /*
434c4762a1bSJed Brown       Access directly the values in our local INPUT work array
435c4762a1bSJed Brown   */
4369566063dSJacob Faibussowitsch   PetscCall(VecGetArrayRead(local_in, &localptr));
437c4762a1bSJed Brown 
438c4762a1bSJed Brown   /*
439c4762a1bSJed Brown       Access directly the values in our local OUTPUT work array
440c4762a1bSJed Brown   */
4419566063dSJacob Faibussowitsch   PetscCall(VecGetArray(localwork, &copyptr));
442c4762a1bSJed Brown 
443c4762a1bSJed Brown   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
444c4762a1bSJed Brown 
445c4762a1bSJed Brown   /*
446c4762a1bSJed Brown       Evaluate our function on the nodes owned by this processor
447c4762a1bSJed Brown   */
4489566063dSJacob Faibussowitsch   PetscCall(VecGetLocalSize(local_in, &localsize));
449c4762a1bSJed Brown 
450c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
451c4762a1bSJed Brown      Compute entries for the locally owned part
452c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
453c4762a1bSJed Brown 
454c4762a1bSJed Brown   /*
455c4762a1bSJed Brown      Handle boundary conditions: This is done by using the boundary condition
456c4762a1bSJed Brown         u(t,boundary) = g(t,boundary)
457c4762a1bSJed Brown      for some function g. Now take the derivative with respect to t to obtain
458c4762a1bSJed Brown         u_{t}(t,boundary) = g_{t}(t,boundary)
459c4762a1bSJed Brown 
460c4762a1bSJed Brown      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
461c4762a1bSJed Brown              and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
462c4762a1bSJed Brown   */
4639566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
4649566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
465dd400576SPatrick Sanan   if (rank == 0) copyptr[0] = 1.0;
466c4762a1bSJed Brown   if (rank == size - 1) copyptr[localsize - 1] = 2.0;
467c4762a1bSJed Brown 
468c4762a1bSJed Brown   /*
469c4762a1bSJed Brown      Handle the interior nodes where the PDE is replace by finite
470c4762a1bSJed Brown      difference operators.
471c4762a1bSJed Brown   */
472c4762a1bSJed Brown   for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
473c4762a1bSJed Brown 
474c4762a1bSJed Brown   /*
475c4762a1bSJed Brown      Restore vectors
476c4762a1bSJed Brown   */
4779566063dSJacob Faibussowitsch   PetscCall(VecRestoreArrayRead(local_in, &localptr));
4789566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(localwork, &copyptr));
479c4762a1bSJed Brown 
480c4762a1bSJed Brown   /*
481c4762a1bSJed Brown      Insert values from the local OUTPUT vector into the global
482c4762a1bSJed Brown      output vector
483c4762a1bSJed Brown   */
4849566063dSJacob Faibussowitsch   PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
4859566063dSJacob Faibussowitsch   PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
486c4762a1bSJed Brown 
487c4762a1bSJed Brown   /* Print debugging information if desired */
488c4762a1bSJed Brown   if (appctx->debug) {
4899566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
4909566063dSJacob Faibussowitsch     PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
491c4762a1bSJed Brown   }
492c4762a1bSJed Brown 
493*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
494c4762a1bSJed Brown }
495c4762a1bSJed Brown /* --------------------------------------------------------------------- */
496c4762a1bSJed Brown /*
497c4762a1bSJed Brown    RHSJacobian - User-provided routine to compute the Jacobian of
498c4762a1bSJed Brown    the nonlinear right-hand-side function of the ODE.
499c4762a1bSJed Brown 
500c4762a1bSJed Brown    Input Parameters:
501c4762a1bSJed Brown    ts - the TS context
502c4762a1bSJed Brown    t - current time
503c4762a1bSJed Brown    global_in - global input vector
504c4762a1bSJed Brown    dummy - optional user-defined context, as set by TSSetRHSJacobian()
505c4762a1bSJed Brown 
506c4762a1bSJed Brown    Output Parameters:
507c4762a1bSJed Brown    AA - Jacobian matrix
508c4762a1bSJed Brown    BB - optionally different preconditioning matrix
509c4762a1bSJed Brown    str - flag indicating matrix structure
510c4762a1bSJed Brown 
511c4762a1bSJed Brown   Notes:
512c4762a1bSJed Brown   RHSJacobian computes entries for the locally owned part of the Jacobian.
513c4762a1bSJed Brown    - Currently, all PETSc parallel matrix formats are partitioned by
514c4762a1bSJed Brown      contiguous chunks of rows across the processors.
515c4762a1bSJed Brown    - Each processor needs to insert only elements that it owns
516c4762a1bSJed Brown      locally (but any non-local elements will be sent to the
517c4762a1bSJed Brown      appropriate processor during matrix assembly).
518c4762a1bSJed Brown    - Always specify global row and columns of matrix entries when
519c4762a1bSJed Brown      using MatSetValues().
520c4762a1bSJed Brown    - Here, we set all entries for a particular row at once.
521c4762a1bSJed Brown    - Note that MatSetValues() uses 0-based row and column numbers
522c4762a1bSJed Brown      in Fortran as well as in C.
523c4762a1bSJed Brown */
524d71ae5a4SJacob Faibussowitsch PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
525d71ae5a4SJacob Faibussowitsch {
526c4762a1bSJed Brown   AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
527c4762a1bSJed Brown   Vec                local_in = appctx->u_local; /* local ghosted input vector */
528c4762a1bSJed Brown   DM                 da       = appctx->da;      /* distributed array */
529c4762a1bSJed Brown   PetscScalar        v[3], sc;
530c4762a1bSJed Brown   const PetscScalar *localptr;
531c4762a1bSJed Brown   PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;
532c4762a1bSJed Brown 
533*3ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
534c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
535c4762a1bSJed Brown      Get ready for local Jacobian computations
536c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
537c4762a1bSJed Brown   /*
538c4762a1bSJed Brown      Scatter ghost points to local vector, using the 2-step process
539c4762a1bSJed Brown         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
540c4762a1bSJed Brown      By placing code between these two statements, computations can be
541c4762a1bSJed Brown      done while messages are in transition.
542c4762a1bSJed Brown   */
5439566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
5449566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
545c4762a1bSJed Brown 
546c4762a1bSJed Brown   /*
547c4762a1bSJed Brown      Get pointer to vector data
548c4762a1bSJed Brown   */
5499566063dSJacob Faibussowitsch   PetscCall(VecGetArrayRead(local_in, &localptr));
550c4762a1bSJed Brown 
551c4762a1bSJed Brown   /*
552c4762a1bSJed Brown      Get starting and ending locally owned rows of the matrix
553c4762a1bSJed Brown   */
5549566063dSJacob Faibussowitsch   PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
5559371c9d4SSatish Balay   mstart = mstarts;
5569371c9d4SSatish Balay   mend   = mends;
557c4762a1bSJed Brown 
558c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
559c4762a1bSJed Brown      Compute entries for the locally owned part of the Jacobian.
560c4762a1bSJed Brown       - Currently, all PETSc parallel matrix formats are partitioned by
561c4762a1bSJed Brown         contiguous chunks of rows across the processors.
562c4762a1bSJed Brown       - Each processor needs to insert only elements that it owns
563c4762a1bSJed Brown         locally (but any non-local elements will be sent to the
564c4762a1bSJed Brown         appropriate processor during matrix assembly).
565c4762a1bSJed Brown       - Here, we set all entries for a particular row at once.
566c4762a1bSJed Brown       - We can set matrix entries either using either
567c4762a1bSJed Brown         MatSetValuesLocal() or MatSetValues().
568c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
569c4762a1bSJed Brown 
570c4762a1bSJed Brown   /*
571c4762a1bSJed Brown      Set matrix rows corresponding to boundary data
572c4762a1bSJed Brown   */
573c4762a1bSJed Brown   if (mstart == 0) {
574c4762a1bSJed Brown     v[0] = 0.0;
5759566063dSJacob Faibussowitsch     PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
576c4762a1bSJed Brown     mstart++;
577c4762a1bSJed Brown   }
578c4762a1bSJed Brown   if (mend == appctx->m) {
579c4762a1bSJed Brown     mend--;
580c4762a1bSJed Brown     v[0] = 0.0;
5819566063dSJacob Faibussowitsch     PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
582c4762a1bSJed Brown   }
583c4762a1bSJed Brown 
584c4762a1bSJed Brown   /*
585c4762a1bSJed Brown      Set matrix rows corresponding to interior data.  We construct the
586c4762a1bSJed Brown      matrix one row at a time.
587c4762a1bSJed Brown   */
588c4762a1bSJed Brown   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
589c4762a1bSJed Brown   for (i = mstart; i < mend; i++) {
5909371c9d4SSatish Balay     idx[0] = i - 1;
5919371c9d4SSatish Balay     idx[1] = i;
5929371c9d4SSatish Balay     idx[2] = i + 1;
593c4762a1bSJed Brown     is     = i - mstart + 1;
594c4762a1bSJed Brown     v[0]   = sc * localptr[is];
595c4762a1bSJed Brown     v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
596c4762a1bSJed Brown     v[2]   = sc * localptr[is];
5979566063dSJacob Faibussowitsch     PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
598c4762a1bSJed Brown   }
599c4762a1bSJed Brown 
600c4762a1bSJed Brown   /*
601c4762a1bSJed Brown      Restore vector
602c4762a1bSJed Brown   */
6039566063dSJacob Faibussowitsch   PetscCall(VecRestoreArrayRead(local_in, &localptr));
604c4762a1bSJed Brown 
605c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
606c4762a1bSJed Brown      Complete the matrix assembly process and set some options
607c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
608c4762a1bSJed Brown   /*
609c4762a1bSJed Brown      Assemble matrix, using the 2-step process:
610c4762a1bSJed Brown        MatAssemblyBegin(), MatAssemblyEnd()
611c4762a1bSJed Brown      Computations can be done while messages are in transition
612c4762a1bSJed Brown      by placing code between these two statements.
613c4762a1bSJed Brown   */
6149566063dSJacob Faibussowitsch   PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
6159566063dSJacob Faibussowitsch   PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
616c4762a1bSJed Brown   if (BB != AA) {
6179566063dSJacob Faibussowitsch     PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
6189566063dSJacob Faibussowitsch     PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
619c4762a1bSJed Brown   }
620c4762a1bSJed Brown 
621c4762a1bSJed Brown   /*
622c4762a1bSJed Brown      Set and option to indicate that we will never add a new nonzero location
623c4762a1bSJed Brown      to the matrix. If we do, it will generate an error.
624c4762a1bSJed Brown   */
6259566063dSJacob Faibussowitsch   PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
626c4762a1bSJed Brown 
627*3ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
628c4762a1bSJed Brown }
629c4762a1bSJed Brown 
630c4762a1bSJed Brown /*TEST
631c4762a1bSJed Brown 
632c4762a1bSJed Brown     test:
633c4762a1bSJed Brown       args: -nox -ts_dt 10 -mymonitor
634c4762a1bSJed Brown       nsize: 2
635c4762a1bSJed Brown       requires: !single
636c4762a1bSJed Brown 
637c4762a1bSJed Brown     test:
638c4762a1bSJed Brown       suffix: tut_1
639c4762a1bSJed Brown       nsize: 1
640c4762a1bSJed Brown       args: -ts_max_steps 10 -ts_monitor
641c4762a1bSJed Brown 
642c4762a1bSJed Brown     test:
643c4762a1bSJed Brown       suffix: tut_2
644c4762a1bSJed Brown       nsize: 4
645c4762a1bSJed Brown       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
646c4762a1bSJed Brown 
647c4762a1bSJed Brown     test:
648c4762a1bSJed Brown       suffix: tut_3
649c4762a1bSJed Brown       nsize: 4
6502e16c0ceSBarry Smith       args: -ts_max_steps 10 -ts_monitor -M 128
651c4762a1bSJed Brown 
652c4762a1bSJed Brown TEST*/
653