1c4762a1bSJed Brown static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
2c4762a1bSJed Brown timestepping. Runtime options include:\n\
3c4762a1bSJed Brown -M <xg>, where <xg> = number of grid points\n\
4c4762a1bSJed Brown -debug : Activate debugging printouts\n\
5c4762a1bSJed Brown -nox : Deactivate x-window graphics\n\n";
6c4762a1bSJed Brown
7c4762a1bSJed Brown /* ------------------------------------------------------------------------
8c4762a1bSJed Brown
9c4762a1bSJed Brown This program solves the PDE
10c4762a1bSJed Brown
11c4762a1bSJed Brown u * u_xx
12c4762a1bSJed Brown u_t = ---------
13c4762a1bSJed Brown 2*(t+1)^2
14c4762a1bSJed Brown
15c4762a1bSJed Brown on the domain 0 <= x <= 1, with boundary conditions
16c4762a1bSJed Brown u(t,0) = t + 1, u(t,1) = 2*t + 2,
17c4762a1bSJed Brown and initial condition
18c4762a1bSJed Brown u(0,x) = 1 + x*x.
19c4762a1bSJed Brown
20c4762a1bSJed Brown The exact solution is:
21c4762a1bSJed Brown u(t,x) = (1 + x*x) * (1 + t)
22c4762a1bSJed Brown
23c4762a1bSJed Brown Note that since the solution is linear in time and quadratic in x,
24c4762a1bSJed Brown the finite difference scheme actually computes the "exact" solution.
25c4762a1bSJed Brown
26c4762a1bSJed Brown We use by default the backward Euler method.
27c4762a1bSJed Brown
28c4762a1bSJed Brown ------------------------------------------------------------------------- */
29c4762a1bSJed Brown
30c4762a1bSJed Brown /*
31c4762a1bSJed Brown Include "petscts.h" to use the PETSc timestepping routines. Note that
32c4762a1bSJed Brown this file automatically includes "petscsys.h" and other lower-level
33c4762a1bSJed Brown PETSc include files.
34c4762a1bSJed Brown
35c4762a1bSJed Brown Include the "petscdmda.h" to allow us to use the distributed array data
36c4762a1bSJed Brown structures to manage the parallel grid.
37c4762a1bSJed Brown */
38c4762a1bSJed Brown #include <petscts.h>
39c4762a1bSJed Brown #include <petscdm.h>
40c4762a1bSJed Brown #include <petscdmda.h>
41c4762a1bSJed Brown #include <petscdraw.h>
42c4762a1bSJed Brown
43c4762a1bSJed Brown /*
44c4762a1bSJed Brown User-defined application context - contains data needed by the
45c4762a1bSJed Brown application-provided callback routines.
46c4762a1bSJed Brown */
47c4762a1bSJed Brown typedef struct {
48c4762a1bSJed Brown MPI_Comm comm; /* communicator */
49c4762a1bSJed Brown DM da; /* distributed array data structure */
50c4762a1bSJed Brown Vec localwork; /* local ghosted work vector */
51c4762a1bSJed Brown Vec u_local; /* local ghosted approximate solution vector */
52c4762a1bSJed Brown Vec solution; /* global exact solution vector */
53c4762a1bSJed Brown PetscInt m; /* total number of grid points */
54c4762a1bSJed Brown PetscReal h; /* mesh width: h = 1/(m-1) */
55c4762a1bSJed Brown PetscBool debug; /* flag (1 indicates activation of debugging printouts) */
56c4762a1bSJed Brown } AppCtx;
57c4762a1bSJed Brown
58c4762a1bSJed Brown /*
59c4762a1bSJed Brown User-defined routines, provided below.
60c4762a1bSJed Brown */
61c4762a1bSJed Brown extern PetscErrorCode InitialConditions(Vec, AppCtx *);
62c4762a1bSJed Brown extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
63c4762a1bSJed Brown extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
64c4762a1bSJed Brown extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
65c4762a1bSJed Brown extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
66c4762a1bSJed Brown
int main(int argc, char **argv)
{
  AppCtx    appctx;                 /* user-defined application context shared with all callbacks */
  TS        ts;                     /* timestepping context */
  Mat       A;                      /* Jacobian matrix data structure */
  Vec       u;                      /* approximate solution vector */
  PetscInt  time_steps_max = 100;   /* default max timesteps */
  PetscReal dt;                     /* initial timestep, derived from the mesh width below */
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3}; /* fixed y-axis range for the draw viewer */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  /* Pin the vertical axis of the runtime line graph so the plot does not
     rescale as the solution grows in time. */
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m = 60;

  /* Runtime options: -M sets the number of grid points, -debug enables
     verbose vector printing, -mymonitor installs the Monitor() callback. */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));

  /* Uniform mesh width on [0,1] with m points. */
  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  /* The same matrix A is used for the Jacobian and the preconditioner. */
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
211c4762a1bSJed Brown /* --------------------------------------------------------------------- */
212c4762a1bSJed Brown /*
213c4762a1bSJed Brown InitialConditions - Computes the solution at the initial time.
214c4762a1bSJed Brown
215c4762a1bSJed Brown Input Parameters:
216c4762a1bSJed Brown u - uninitialized solution vector (global)
217c4762a1bSJed Brown appctx - user-defined application context
218c4762a1bSJed Brown
219c4762a1bSJed Brown Output Parameter:
220c4762a1bSJed Brown u - vector with solution at initial time (global)
221c4762a1bSJed Brown */
InitialConditions(Vec u,AppCtx * appctx)222d71ae5a4SJacob Faibussowitsch PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
223d71ae5a4SJacob Faibussowitsch {
224c4762a1bSJed Brown PetscScalar *u_localptr, h = appctx->h, x;
225c4762a1bSJed Brown PetscInt i, mybase, myend;
226c4762a1bSJed Brown
2273ba16761SJacob Faibussowitsch PetscFunctionBeginUser;
228c4762a1bSJed Brown /*
229c4762a1bSJed Brown Determine starting point of each processor's range of
230c4762a1bSJed Brown grid values.
231c4762a1bSJed Brown */
2329566063dSJacob Faibussowitsch PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
233c4762a1bSJed Brown
234c4762a1bSJed Brown /*
235c4762a1bSJed Brown Get a pointer to vector data.
236c4762a1bSJed Brown - For default PETSc vectors, VecGetArray() returns a pointer to
237c4762a1bSJed Brown the data array. Otherwise, the routine is implementation dependent.
238c4762a1bSJed Brown - You MUST call VecRestoreArray() when you no longer need access to
239c4762a1bSJed Brown the array.
240c4762a1bSJed Brown - Note that the Fortran interface to VecGetArray() differs from the
241c4762a1bSJed Brown C version. See the users manual for details.
242c4762a1bSJed Brown */
2439566063dSJacob Faibussowitsch PetscCall(VecGetArray(u, &u_localptr));
244c4762a1bSJed Brown
245c4762a1bSJed Brown /*
246c4762a1bSJed Brown We initialize the solution array by simply writing the solution
247c4762a1bSJed Brown directly into the array locations. Alternatively, we could use
248c4762a1bSJed Brown VecSetValues() or VecSetValuesLocal().
249c4762a1bSJed Brown */
250c4762a1bSJed Brown for (i = mybase; i < myend; i++) {
251c4762a1bSJed Brown x = h * (PetscReal)i; /* current location in global grid */
252c4762a1bSJed Brown u_localptr[i - mybase] = 1.0 + x * x;
253c4762a1bSJed Brown }
254c4762a1bSJed Brown
255c4762a1bSJed Brown /*
256c4762a1bSJed Brown Restore vector
257c4762a1bSJed Brown */
2589566063dSJacob Faibussowitsch PetscCall(VecRestoreArray(u, &u_localptr));
259c4762a1bSJed Brown
260c4762a1bSJed Brown /*
261c4762a1bSJed Brown Print debugging information if desired
262c4762a1bSJed Brown */
263c4762a1bSJed Brown if (appctx->debug) {
2649566063dSJacob Faibussowitsch PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
2659566063dSJacob Faibussowitsch PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
266c4762a1bSJed Brown }
2673ba16761SJacob Faibussowitsch PetscFunctionReturn(PETSC_SUCCESS);
268c4762a1bSJed Brown }
269c4762a1bSJed Brown /* --------------------------------------------------------------------- */
270c4762a1bSJed Brown /*
271c4762a1bSJed Brown ExactSolution - Computes the exact solution at a given time.
272c4762a1bSJed Brown
273c4762a1bSJed Brown Input Parameters:
274c4762a1bSJed Brown t - current time
275c4762a1bSJed Brown solution - vector in which exact solution will be computed
276c4762a1bSJed Brown appctx - user-defined application context
277c4762a1bSJed Brown
278c4762a1bSJed Brown Output Parameter:
279c4762a1bSJed Brown solution - vector with the newly computed exact solution
280c4762a1bSJed Brown */
ExactSolution(PetscReal t,Vec solution,AppCtx * appctx)281d71ae5a4SJacob Faibussowitsch PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
282d71ae5a4SJacob Faibussowitsch {
283c4762a1bSJed Brown PetscScalar *s_localptr, h = appctx->h, x;
284c4762a1bSJed Brown PetscInt i, mybase, myend;
285c4762a1bSJed Brown
2863ba16761SJacob Faibussowitsch PetscFunctionBeginUser;
287c4762a1bSJed Brown /*
288c4762a1bSJed Brown Determine starting and ending points of each processor's
289c4762a1bSJed Brown range of grid values
290c4762a1bSJed Brown */
2919566063dSJacob Faibussowitsch PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
292c4762a1bSJed Brown
293c4762a1bSJed Brown /*
294c4762a1bSJed Brown Get a pointer to vector data.
295c4762a1bSJed Brown */
2969566063dSJacob Faibussowitsch PetscCall(VecGetArray(solution, &s_localptr));
297c4762a1bSJed Brown
298c4762a1bSJed Brown /*
299c4762a1bSJed Brown Simply write the solution directly into the array locations.
300c4762a1bSJed Brown Alternatively, we could use VecSetValues() or VecSetValuesLocal().
301c4762a1bSJed Brown */
302c4762a1bSJed Brown for (i = mybase; i < myend; i++) {
303c4762a1bSJed Brown x = h * (PetscReal)i;
304c4762a1bSJed Brown s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
305c4762a1bSJed Brown }
306c4762a1bSJed Brown
307c4762a1bSJed Brown /*
308c4762a1bSJed Brown Restore vector
309c4762a1bSJed Brown */
3109566063dSJacob Faibussowitsch PetscCall(VecRestoreArray(solution, &s_localptr));
3113ba16761SJacob Faibussowitsch PetscFunctionReturn(PETSC_SUCCESS);
312c4762a1bSJed Brown }
313c4762a1bSJed Brown /* --------------------------------------------------------------------- */
314c4762a1bSJed Brown /*
315c4762a1bSJed Brown Monitor - User-provided routine to monitor the solution computed at
316c4762a1bSJed Brown each timestep. This example plots the solution and computes the
317c4762a1bSJed Brown error in two different norms.
318c4762a1bSJed Brown
319c4762a1bSJed Brown Input Parameters:
320c4762a1bSJed Brown ts - the timestep context
321c4762a1bSJed Brown step - the count of the current step (with 0 meaning the
322c4762a1bSJed Brown initial condition)
323c4762a1bSJed Brown time - the current time
324c4762a1bSJed Brown u - the solution at this timestep
325c4762a1bSJed Brown ctx - the user-provided context for this monitoring routine.
326c4762a1bSJed Brown In this case we use the application context which contains
327c4762a1bSJed Brown information about the problem size, workspace and the exact
328c4762a1bSJed Brown solution.
329c4762a1bSJed Brown */
Monitor(TS ts,PetscInt step,PetscReal time,Vec u,PetscCtx ctx)330*2a8381b2SBarry Smith PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, PetscCtx ctx)
331d71ae5a4SJacob Faibussowitsch {
332c4762a1bSJed Brown AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
333c4762a1bSJed Brown PetscReal en2, en2s, enmax;
334c4762a1bSJed Brown PetscDraw draw;
335c4762a1bSJed Brown
3363ba16761SJacob Faibussowitsch PetscFunctionBeginUser;
337c4762a1bSJed Brown /*
338e1dfdf8eSBarry Smith We use the default X Windows viewer
339c4762a1bSJed Brown PETSC_VIEWER_DRAW_(appctx->comm)
340c4762a1bSJed Brown that is associated with the current communicator. This saves
341c4762a1bSJed Brown the effort of calling PetscViewerDrawOpen() to create the window.
342c4762a1bSJed Brown Note that if we wished to plot several items in separate windows we
343c4762a1bSJed Brown would create each viewer with PetscViewerDrawOpen() and store them in
344c4762a1bSJed Brown the application context, appctx.
345c4762a1bSJed Brown
346c4762a1bSJed Brown PetscReal buffering makes graphics look better.
347c4762a1bSJed Brown */
3489566063dSJacob Faibussowitsch PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
3499566063dSJacob Faibussowitsch PetscCall(PetscDrawSetDoubleBuffer(draw));
3509566063dSJacob Faibussowitsch PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));
351c4762a1bSJed Brown
352c4762a1bSJed Brown /*
353c4762a1bSJed Brown Compute the exact solution at this timestep
354c4762a1bSJed Brown */
3559566063dSJacob Faibussowitsch PetscCall(ExactSolution(time, appctx->solution, appctx));
356c4762a1bSJed Brown
357c4762a1bSJed Brown /*
358c4762a1bSJed Brown Print debugging information if desired
359c4762a1bSJed Brown */
360c4762a1bSJed Brown if (appctx->debug) {
3619566063dSJacob Faibussowitsch PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
3629566063dSJacob Faibussowitsch PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
3639566063dSJacob Faibussowitsch PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
3649566063dSJacob Faibussowitsch PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
365c4762a1bSJed Brown }
366c4762a1bSJed Brown
367c4762a1bSJed Brown /*
368c4762a1bSJed Brown Compute the 2-norm and max-norm of the error
369c4762a1bSJed Brown */
3709566063dSJacob Faibussowitsch PetscCall(VecAXPY(appctx->solution, -1.0, u));
3719566063dSJacob Faibussowitsch PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
372c4762a1bSJed Brown en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
3739566063dSJacob Faibussowitsch PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));
374c4762a1bSJed Brown
375c4762a1bSJed Brown /*
376c4762a1bSJed Brown PetscPrintf() causes only the first processor in this
377c4762a1bSJed Brown communicator to print the timestep information.
378c4762a1bSJed Brown */
37963a3b9bcSJacob Faibussowitsch PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));
380c4762a1bSJed Brown
381c4762a1bSJed Brown /*
382c4762a1bSJed Brown Print debugging information if desired
383c4762a1bSJed Brown */
384c4762a1bSJed Brown if (appctx->debug) {
3859566063dSJacob Faibussowitsch PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
3869566063dSJacob Faibussowitsch PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
387c4762a1bSJed Brown }
3883ba16761SJacob Faibussowitsch PetscFunctionReturn(PETSC_SUCCESS);
389c4762a1bSJed Brown }
390c4762a1bSJed Brown /* --------------------------------------------------------------------- */
391c4762a1bSJed Brown /*
   RHSFunction - User-provided routine that evaluates the right-hand-side
393c4762a1bSJed Brown function of the ODE. This routine is set in the main program by
394c4762a1bSJed Brown calling TSSetRHSFunction(). We compute:
395c4762a1bSJed Brown global_out = F(global_in)
396c4762a1bSJed Brown
397c4762a1bSJed Brown Input Parameters:
   ts - timestepping context
399c4762a1bSJed Brown t - current time
400c4762a1bSJed Brown global_in - vector containing the current iterate
401c4762a1bSJed Brown ctx - (optional) user-provided context for function evaluation.
402c4762a1bSJed Brown In this case we use the appctx defined above.
403c4762a1bSJed Brown
404c4762a1bSJed Brown Output Parameter:
405c4762a1bSJed Brown global_out - vector containing the newly evaluated function
406c4762a1bSJed Brown */
RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,PetscCtx ctx)407*2a8381b2SBarry Smith PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, PetscCtx ctx)
408d71ae5a4SJacob Faibussowitsch {
409c4762a1bSJed Brown AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
410c4762a1bSJed Brown DM da = appctx->da; /* distributed array */
411c4762a1bSJed Brown Vec local_in = appctx->u_local; /* local ghosted input vector */
412c4762a1bSJed Brown Vec localwork = appctx->localwork; /* local ghosted work vector */
413c4762a1bSJed Brown PetscInt i, localsize;
414c4762a1bSJed Brown PetscMPIInt rank, size;
415c4762a1bSJed Brown PetscScalar *copyptr, sc;
416c4762a1bSJed Brown const PetscScalar *localptr;
417c4762a1bSJed Brown
4183ba16761SJacob Faibussowitsch PetscFunctionBeginUser;
419c4762a1bSJed Brown /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
420c4762a1bSJed Brown Get ready for local function computations
421c4762a1bSJed Brown - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
422c4762a1bSJed Brown /*
423c4762a1bSJed Brown Scatter ghost points to local vector, using the 2-step process
424c4762a1bSJed Brown DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
425c4762a1bSJed Brown By placing code between these two statements, computations can be
426c4762a1bSJed Brown done while messages are in transition.
427c4762a1bSJed Brown */
4289566063dSJacob Faibussowitsch PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
4299566063dSJacob Faibussowitsch PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
430c4762a1bSJed Brown
431c4762a1bSJed Brown /*
432c4762a1bSJed Brown Access directly the values in our local INPUT work array
433c4762a1bSJed Brown */
4349566063dSJacob Faibussowitsch PetscCall(VecGetArrayRead(local_in, &localptr));
435c4762a1bSJed Brown
436c4762a1bSJed Brown /*
437c4762a1bSJed Brown Access directly the values in our local OUTPUT work array
438c4762a1bSJed Brown */
4399566063dSJacob Faibussowitsch PetscCall(VecGetArray(localwork, ©ptr));
440c4762a1bSJed Brown
441c4762a1bSJed Brown sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
442c4762a1bSJed Brown
443c4762a1bSJed Brown /*
444c4762a1bSJed Brown Evaluate our function on the nodes owned by this processor
445c4762a1bSJed Brown */
4469566063dSJacob Faibussowitsch PetscCall(VecGetLocalSize(local_in, &localsize));
447c4762a1bSJed Brown
448c4762a1bSJed Brown /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
449c4762a1bSJed Brown Compute entries for the locally owned part
450c4762a1bSJed Brown - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
451c4762a1bSJed Brown
452c4762a1bSJed Brown /*
453c4762a1bSJed Brown Handle boundary conditions: This is done by using the boundary condition
454c4762a1bSJed Brown u(t,boundary) = g(t,boundary)
455c4762a1bSJed Brown for some function g. Now take the derivative with respect to t to obtain
456c4762a1bSJed Brown u_{t}(t,boundary) = g_{t}(t,boundary)
457c4762a1bSJed Brown
458c4762a1bSJed Brown In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
459c4762a1bSJed Brown and u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
460c4762a1bSJed Brown */
4619566063dSJacob Faibussowitsch PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
4629566063dSJacob Faibussowitsch PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
463dd400576SPatrick Sanan if (rank == 0) copyptr[0] = 1.0;
464c4762a1bSJed Brown if (rank == size - 1) copyptr[localsize - 1] = 2.0;
465c4762a1bSJed Brown
466c4762a1bSJed Brown /*
467c4762a1bSJed Brown Handle the interior nodes where the PDE is replace by finite
468c4762a1bSJed Brown difference operators.
469c4762a1bSJed Brown */
470c4762a1bSJed Brown for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
471c4762a1bSJed Brown
472c4762a1bSJed Brown /*
473c4762a1bSJed Brown Restore vectors
474c4762a1bSJed Brown */
4759566063dSJacob Faibussowitsch PetscCall(VecRestoreArrayRead(local_in, &localptr));
4769566063dSJacob Faibussowitsch PetscCall(VecRestoreArray(localwork, ©ptr));
477c4762a1bSJed Brown
478c4762a1bSJed Brown /*
479c4762a1bSJed Brown Insert values from the local OUTPUT vector into the global
480c4762a1bSJed Brown output vector
481c4762a1bSJed Brown */
4829566063dSJacob Faibussowitsch PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
4839566063dSJacob Faibussowitsch PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
484c4762a1bSJed Brown
485c4762a1bSJed Brown /* Print debugging information if desired */
486c4762a1bSJed Brown if (appctx->debug) {
4879566063dSJacob Faibussowitsch PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
4889566063dSJacob Faibussowitsch PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
489c4762a1bSJed Brown }
4903ba16761SJacob Faibussowitsch PetscFunctionReturn(PETSC_SUCCESS);
491c4762a1bSJed Brown }
492c4762a1bSJed Brown /* --------------------------------------------------------------------- */
493c4762a1bSJed Brown /*
494c4762a1bSJed Brown RHSJacobian - User-provided routine to compute the Jacobian of
495c4762a1bSJed Brown the nonlinear right-hand-side function of the ODE.
496c4762a1bSJed Brown
497c4762a1bSJed Brown Input Parameters:
498c4762a1bSJed Brown ts - the TS context
499c4762a1bSJed Brown t - current time
500c4762a1bSJed Brown global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()
502c4762a1bSJed Brown
503c4762a1bSJed Brown Output Parameters:
504c4762a1bSJed Brown AA - Jacobian matrix
5057addb90fSBarry Smith BB - optionally different matrix used to construct the preconditioner
506c4762a1bSJed Brown
507c4762a1bSJed Brown Notes:
508c4762a1bSJed Brown RHSJacobian computes entries for the locally owned part of the Jacobian.
509c4762a1bSJed Brown - Currently, all PETSc parallel matrix formats are partitioned by
510c4762a1bSJed Brown contiguous chunks of rows across the processors.
511c4762a1bSJed Brown - Each processor needs to insert only elements that it owns
512c4762a1bSJed Brown locally (but any non-local elements will be sent to the
513c4762a1bSJed Brown appropriate processor during matrix assembly).
514c4762a1bSJed Brown - Always specify global row and columns of matrix entries when
515c4762a1bSJed Brown using MatSetValues().
516c4762a1bSJed Brown - Here, we set all entries for a particular row at once.
517c4762a1bSJed Brown - Note that MatSetValues() uses 0-based row and column numbers
518c4762a1bSJed Brown in Fortran as well as in C.
519c4762a1bSJed Brown */
RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,PetscCtx ctx)520*2a8381b2SBarry Smith PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, PetscCtx ctx)
521d71ae5a4SJacob Faibussowitsch {
522c4762a1bSJed Brown AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
523c4762a1bSJed Brown Vec local_in = appctx->u_local; /* local ghosted input vector */
524c4762a1bSJed Brown DM da = appctx->da; /* distributed array */
525c4762a1bSJed Brown PetscScalar v[3], sc;
526c4762a1bSJed Brown const PetscScalar *localptr;
527c4762a1bSJed Brown PetscInt i, mstart, mend, mstarts, mends, idx[3], is;
528c4762a1bSJed Brown
5293ba16761SJacob Faibussowitsch PetscFunctionBeginUser;
530c4762a1bSJed Brown /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
531c4762a1bSJed Brown Get ready for local Jacobian computations
532c4762a1bSJed Brown - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
533c4762a1bSJed Brown /*
534c4762a1bSJed Brown Scatter ghost points to local vector, using the 2-step process
535c4762a1bSJed Brown DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
536c4762a1bSJed Brown By placing code between these two statements, computations can be
537c4762a1bSJed Brown done while messages are in transition.
538c4762a1bSJed Brown */
5399566063dSJacob Faibussowitsch PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
5409566063dSJacob Faibussowitsch PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
541c4762a1bSJed Brown
542c4762a1bSJed Brown /*
543c4762a1bSJed Brown Get pointer to vector data
544c4762a1bSJed Brown */
5459566063dSJacob Faibussowitsch PetscCall(VecGetArrayRead(local_in, &localptr));
546c4762a1bSJed Brown
547c4762a1bSJed Brown /*
548c4762a1bSJed Brown Get starting and ending locally owned rows of the matrix
549c4762a1bSJed Brown */
5509566063dSJacob Faibussowitsch PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
5519371c9d4SSatish Balay mstart = mstarts;
5529371c9d4SSatish Balay mend = mends;
553c4762a1bSJed Brown
554c4762a1bSJed Brown /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
555c4762a1bSJed Brown Compute entries for the locally owned part of the Jacobian.
556c4762a1bSJed Brown - Currently, all PETSc parallel matrix formats are partitioned by
557c4762a1bSJed Brown contiguous chunks of rows across the processors.
558c4762a1bSJed Brown - Each processor needs to insert only elements that it owns
559c4762a1bSJed Brown locally (but any non-local elements will be sent to the
560c4762a1bSJed Brown appropriate processor during matrix assembly).
561c4762a1bSJed Brown - Here, we set all entries for a particular row at once.
562c4762a1bSJed Brown - We can set matrix entries using either
563c4762a1bSJed Brown MatSetValuesLocal() or MatSetValues().
564c4762a1bSJed Brown - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
565c4762a1bSJed Brown
566c4762a1bSJed Brown /*
567c4762a1bSJed Brown Set matrix rows corresponding to boundary data
568c4762a1bSJed Brown */
569c4762a1bSJed Brown if (mstart == 0) {
570c4762a1bSJed Brown v[0] = 0.0;
5719566063dSJacob Faibussowitsch PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
572c4762a1bSJed Brown mstart++;
573c4762a1bSJed Brown }
574c4762a1bSJed Brown if (mend == appctx->m) {
575c4762a1bSJed Brown mend--;
576c4762a1bSJed Brown v[0] = 0.0;
5779566063dSJacob Faibussowitsch PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
578c4762a1bSJed Brown }
579c4762a1bSJed Brown
580c4762a1bSJed Brown /*
581c4762a1bSJed Brown Set matrix rows corresponding to interior data. We construct the
582c4762a1bSJed Brown matrix one row at a time.
583c4762a1bSJed Brown */
584c4762a1bSJed Brown sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
585c4762a1bSJed Brown for (i = mstart; i < mend; i++) {
5869371c9d4SSatish Balay idx[0] = i - 1;
5879371c9d4SSatish Balay idx[1] = i;
5889371c9d4SSatish Balay idx[2] = i + 1;
589c4762a1bSJed Brown is = i - mstart + 1;
590c4762a1bSJed Brown v[0] = sc * localptr[is];
591c4762a1bSJed Brown v[1] = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
592c4762a1bSJed Brown v[2] = sc * localptr[is];
5939566063dSJacob Faibussowitsch PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
594c4762a1bSJed Brown }
595c4762a1bSJed Brown
596c4762a1bSJed Brown /*
597c4762a1bSJed Brown Restore vector
598c4762a1bSJed Brown */
5999566063dSJacob Faibussowitsch PetscCall(VecRestoreArrayRead(local_in, &localptr));
600c4762a1bSJed Brown
601c4762a1bSJed Brown /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
602c4762a1bSJed Brown Complete the matrix assembly process and set some options
603c4762a1bSJed Brown - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
604c4762a1bSJed Brown /*
605c4762a1bSJed Brown Assemble matrix, using the 2-step process:
606c4762a1bSJed Brown MatAssemblyBegin(), MatAssemblyEnd()
607c4762a1bSJed Brown Computations can be done while messages are in transition
608c4762a1bSJed Brown by placing code between these two statements.
609c4762a1bSJed Brown */
6109566063dSJacob Faibussowitsch PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
6119566063dSJacob Faibussowitsch PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
612c4762a1bSJed Brown if (BB != AA) {
6139566063dSJacob Faibussowitsch PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
6149566063dSJacob Faibussowitsch PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
615c4762a1bSJed Brown }
616c4762a1bSJed Brown
617c4762a1bSJed Brown /*
618c4762a1bSJed Brown Set an option to indicate that we will never add a new nonzero location
619c4762a1bSJed Brown to the matrix. If we do, it will generate an error.
620c4762a1bSJed Brown */
6219566063dSJacob Faibussowitsch PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
6223ba16761SJacob Faibussowitsch PetscFunctionReturn(PETSC_SUCCESS);
623c4762a1bSJed Brown }
624c4762a1bSJed Brown
625c4762a1bSJed Brown /*TEST
626c4762a1bSJed Brown
627c4762a1bSJed Brown test:
628188af4bfSBarry Smith args: -nox -ts_time_step 10 -mymonitor
629c4762a1bSJed Brown nsize: 2
630c4762a1bSJed Brown requires: !single
631c4762a1bSJed Brown
632c4762a1bSJed Brown test:
633c4762a1bSJed Brown suffix: tut_1
634c4762a1bSJed Brown nsize: 1
635c4762a1bSJed Brown args: -ts_max_steps 10 -ts_monitor
636c4762a1bSJed Brown
637c4762a1bSJed Brown test:
638c4762a1bSJed Brown suffix: tut_2
639c4762a1bSJed Brown nsize: 4
640c4762a1bSJed Brown args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
641d8b4a066SPierre Jolivet # GEMV sensitive to single
6429d5502f9SJunchao Zhang args: -vec_mdot_use_gemv 0 -vec_maxpy_use_gemv 0
643c4762a1bSJed Brown
644c4762a1bSJed Brown test:
645c4762a1bSJed Brown suffix: tut_3
646c4762a1bSJed Brown nsize: 4
6472e16c0ceSBarry Smith args: -ts_max_steps 10 -ts_monitor -M 128
648c4762a1bSJed Brown
649c4762a1bSJed Brown TEST*/
650