
static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
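
/*
    A quick check that the stated exact solution indeed satisfies the problem:
    for u(t,x) = (1 + x*x)*(1 + t),
        u_t  = 1 + x*x    and    u_xx = 2*(1 + t),
    so
        u*u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t) * 2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t,
    and the boundary and initial conditions listed above hold as well.
*/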

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx       appctx;               /* user-defined application context */
  TS           ts;                   /* timestepping context */
  Mat          A;                    /* Jacobian matrix data structure */
  Vec          u;                    /* approximate solution vector */
  PetscInt     time_steps_max = 100; /* default max timesteps */
  PetscReal    dt;
  PetscReal    time_total_max = 100.0; /* default max total time */
  PetscOptions options, optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  PetscCall(PetscOptionsCreate(&options));
  PetscCall(PetscOptionsSetValue(options, "-ts_monitor", "ascii"));
  PetscCall(PetscOptionsSetValue(options, "-snes_monitor", "ascii"));
  PetscCall(PetscOptionsSetValue(options, "-ksp_monitor", "ascii"));
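
  /*
     These options are placed in a private options database created with
     PetscOptionsCreate(), not in the global database that is filled from
     the command line.  PetscObjectSetOptions() below attaches this private
     database to each object (DM, TS, Mat), so that the object's
     *SetFromOptions() call reads options from it; at the end of the program
     PetscObjectGetOptions() is used to verify that the same database is
     still attached to the TS.
  */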

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(options, NULL, "-M", &appctx.m, NULL));

  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(PetscObjectSetOptions((PetscObject)appctx.da, options));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(PetscObjectSetOptions((PetscObject)ts, options));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscObjectSetOptions((PetscObject)A, options));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
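
  /*
     If an analytic Jacobian routine were not provided, the nonlinear solver
     underneath the TS could instead approximate the Jacobian by finite
     differences, e.g. via the -snes_fd or -snes_fd_color runtime options.
  */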

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set the initial timestep; the solution vector itself is passed to
     TSSolve() below.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscObjectGetOptions((PetscObject)ts, &optionscopy));
  PetscCheck(options == optionscopy, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "PetscObjectGetOptions() failed");

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }
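
  /*
     For comparison, a sketch of the VecSetValues() alternative mentioned
     above (not used here): entries are addressed by their *global* index
     and the insertions must be completed with an assembly step, e.g.

       for (i = mybase; i < myend; i++) {
         PetscScalar val = 1.0 + (h * (PetscReal)i) * (h * (PetscReal)i);
         PetscCall(VecSetValues(u, 1, &i, &val, INSERT_VALUES));
       }
       PetscCall(VecAssemblyBegin(u));
       PetscCall(VecAssemblyEnd(u));
  */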

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
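
  /*
     Written out, the loop above evaluates the right-hand side with the
     standard second-order central difference for u_xx:

         F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),

     where the factor sc = 1/(2*h^2*(1+t)^2) was computed above.
  */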

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
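
  /*
     For row i the entries below follow by differentiating the interior
     right-hand-side formula F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i):

         dF_i/du_{i-1} = sc * u_i
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc * u_i

     The index is = i - mstart + 1 picks the entry of the ghosted local
     array localptr corresponding to global point i (on rank 0 there is no
     left ghost point, which is compensated by mstart having been advanced
     past the boundary row above).
  */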
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      requires: !single

TEST*/