xref: /petsc/src/ts/tests/ex12.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
2 static char help[] ="Tests PetscObjectSetOptions() for TS object\n\n";
3 
4 /* ------------------------------------------------------------------------
5 
6    This program solves the PDE
7 
8                u * u_xx
9          u_t = ---------
10                2*(t+1)^2
11 
12     on the domain 0 <= x <= 1, with boundary conditions
13          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
14     and initial condition
15          u(0,x) = 1 + x*x.
16 
17     The exact solution is:
18          u(t,x) = (1 + x*x) * (1 + t)
19 
20     Note that since the solution is linear in time and quadratic in x,
21     the finite difference scheme actually computes the "exact" solution.
22 
23     We use by default the backward Euler method.
24 
25   ------------------------------------------------------------------------- */
26 
27 /*
28    Include "petscts.h" to use the PETSc timestepping routines. Note that
29    this file automatically includes "petscsys.h" and other lower-level
30    PETSc include files.
31 
32    Include the "petscdmda.h" to allow us to use the distributed array data
33    structures to manage the parallel grid.
34 */
35 #include <petscts.h>
36 #include <petscdm.h>
37 #include <petscdmda.h>
38 #include <petscdraw.h>
39 
40 /*
41    User-defined application context - contains data needed by the
42    application-provided callback routines.
43 */
44 typedef struct {
45   MPI_Comm  comm;           /* communicator */
46   DM        da;             /* distributed array data structure */
47   Vec       localwork;      /* local ghosted work vector */
48   Vec       u_local;        /* local ghosted approximate solution vector */
49   Vec       solution;       /* global exact solution vector */
50   PetscInt  m;              /* total number of grid points */
51   PetscReal h;              /* mesh width: h = 1/(m-1) */
52 } AppCtx;
53 
54 /*
55    User-defined routines, provided below.
56 */
57 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
58 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
59 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
60 extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
61 
int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;                     /* timestep size */
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscOptions   options,optionscopy;    /* private options database and the copy read back for verification */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /*
     Create a private options database (separate from the global one) and
     preload monitoring options into it.  The point of this test is that
     each object below is attached to THIS database via
     PetscObjectSetOptions(), so these options take effect even though they
     were never placed in the global database.
  */
  PetscCall(PetscOptionsCreate(&options));
  PetscCall(PetscOptionsSetValue(options,"-ts_monitor","ascii"));
  PetscCall(PetscOptionsSetValue(options,"-snes_monitor","ascii"));
  PetscCall(PetscOptionsSetValue(options,"-ksp_monitor","ascii"));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  /* -M is read from the private database, not the global one */
  PetscCall(PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL));

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.  The DMDA
     is attached to the private options database BEFORE DMSetFromOptions()
     so that database governs its configuration.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(PetscObjectSetOptions((PetscObject)appctx.da,options));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.  The TS is likewise attached to
     the private options database before TSSetFromOptions() is called.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(PetscObjectSetOptions((PetscObject)ts,options));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(PetscObjectSetOptions((PetscObject)A,options));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Verify that the options database attached to the TS survived the
     solve: PetscObjectGetOptions() must hand back the same PetscOptions
     pointer we attached above.  This is the actual test of this example.
     Then free work space.  All PETSc objects should be destroyed when
     they are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscObjectGetOptions((PetscObject)ts,&optionscopy));
  PetscCheck(options == optionscopy,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"PetscObjectGetOptions() failed");

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
208 /* --------------------------------------------------------------------- */
209 /*
210    InitialConditions - Computes the solution at the initial time.
211 
212    Input Parameters:
213    u - uninitialized solution vector (global)
214    appctx - user-defined application context
215 
216    Output Parameter:
217    u - vector with solution at initial time (global)
218 */
219 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
220 {
221   PetscScalar    *u_localptr,h = appctx->h,x;
222   PetscInt       i,mybase,myend;
223 
224   /*
225      Determine starting point of each processor's range of
226      grid values.
227   */
228   PetscCall(VecGetOwnershipRange(u,&mybase,&myend));
229 
230   /*
231     Get a pointer to vector data.
232     - For default PETSc vectors, VecGetArray() returns a pointer to
233       the data array.  Otherwise, the routine is implementation dependent.
234     - You MUST call VecRestoreArray() when you no longer need access to
235       the array.
236     - Note that the Fortran interface to VecGetArray() differs from the
237       C version.  See the users manual for details.
238   */
239   PetscCall(VecGetArray(u,&u_localptr));
240 
241   /*
242      We initialize the solution array by simply writing the solution
243      directly into the array locations.  Alternatively, we could use
244      VecSetValues() or VecSetValuesLocal().
245   */
246   for (i=mybase; i<myend; i++) {
247     x = h*(PetscReal)i; /* current location in global grid */
248     u_localptr[i-mybase] = 1.0 + x*x;
249   }
250 
251   /*
252      Restore vector
253   */
254   PetscCall(VecRestoreArray(u,&u_localptr));
255 
256   return 0;
257 }
258 /* --------------------------------------------------------------------- */
259 /*
260    ExactSolution - Computes the exact solution at a given time.
261 
262    Input Parameters:
263    t - current time
264    solution - vector in which exact solution will be computed
265    appctx - user-defined application context
266 
267    Output Parameter:
268    solution - vector with the newly computed exact solution
269 */
270 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
271 {
272   PetscScalar    *s_localptr,h = appctx->h,x;
273   PetscInt       i,mybase,myend;
274 
275   /*
276      Determine starting and ending points of each processor's
277      range of grid values
278   */
279   PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));
280 
281   /*
282      Get a pointer to vector data.
283   */
284   PetscCall(VecGetArray(solution,&s_localptr));
285 
286   /*
287      Simply write the solution directly into the array locations.
288      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
289   */
290   for (i=mybase; i<myend; i++) {
291     x = h*(PetscReal)i;
292     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
293   }
294 
295   /*
296      Restore vector
297   */
298   PetscCall(VecRestoreArray(solution,&s_localptr));
299   return 0;
300 }
301 /* --------------------------------------------------------------------- */
302 /*
303    RHSFunction - User-provided routine that evaluates the right-hand-side
304    function of the ODE.  This routine is set in the main program by
305    calling TSSetRHSFunction().  We compute:
306           global_out = F(global_in)
307 
308    Input Parameters:
309    ts         - timestepping context
310    t          - current time
311    global_in  - vector containing the current iterate
312    ctx        - (optional) user-provided context for function evaluation.
313                 In this case we use the appctx defined above.
314 
315    Output Parameter:
316    global_out - vector containing the newly evaluated function
317 */
318 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
319 {
320   AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
321   DM                da        = appctx->da;        /* distributed array */
322   Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
323   Vec               localwork = appctx->localwork; /* local ghosted work vector */
324   PetscInt          i,localsize;
325   PetscMPIInt       rank,size;
326   PetscScalar       *copyptr,sc;
327   const PetscScalar *localptr;
328 
329   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
330      Get ready for local function computations
331      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
332   /*
333      Scatter ghost points to local vector, using the 2-step process
334         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
335      By placing code between these two statements, computations can be
336      done while messages are in transition.
337   */
338   PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
339   PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));
340 
341   /*
342       Access directly the values in our local INPUT work array
343   */
344   PetscCall(VecGetArrayRead(local_in,&localptr));
345 
346   /*
347       Access directly the values in our local OUTPUT work array
348   */
349   PetscCall(VecGetArray(localwork,&copyptr));
350 
351   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
352 
353   /*
354       Evaluate our function on the nodes owned by this processor
355   */
356   PetscCall(VecGetLocalSize(local_in,&localsize));
357 
358   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
359      Compute entries for the locally owned part
360      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
361 
362   /*
363      Handle boundary conditions: This is done by using the boundary condition
364         u(t,boundary) = g(t,boundary)
365      for some function g. Now take the derivative with respect to t to obtain
366         u_{t}(t,boundary) = g_{t}(t,boundary)
367 
368      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
369              and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
370   */
371   PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
372   PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
373   if (rank == 0)          copyptr[0]           = 1.0;
374   if (rank == size-1) copyptr[localsize-1] = 2.0;
375 
376   /*
377      Handle the interior nodes where the PDE is replace by finite
378      difference operators.
379   */
380   for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
381 
382   /*
383      Restore vectors
384   */
385   PetscCall(VecRestoreArrayRead(local_in,&localptr));
386   PetscCall(VecRestoreArray(localwork,&copyptr));
387 
388   /*
389      Insert values from the local OUTPUT vector into the global
390      output vector
391   */
392   PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
393   PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));
394 
395   return 0;
396 }
397 /* --------------------------------------------------------------------- */
398 /*
399    RHSJacobian - User-provided routine to compute the Jacobian of
400    the nonlinear right-hand-side function of the ODE.
401 
402    Input Parameters:
403    ts - the TS context
404    t - current time
405    global_in - global input vector
406    dummy - optional user-defined context, as set by TSSetRHSJacobian()
407 
408    Output Parameters:
409    AA - Jacobian matrix
410    BB - optionally different preconditioning matrix
411    str - flag indicating matrix structure
412 
413   Notes:
414   RHSJacobian computes entries for the locally owned part of the Jacobian.
415    - Currently, all PETSc parallel matrix formats are partitioned by
416      contiguous chunks of rows across the processors.
417    - Each processor needs to insert only elements that it owns
418      locally (but any non-local elements will be sent to the
419      appropriate processor during matrix assembly).
420    - Always specify global row and columns of matrix entries when
421      using MatSetValues().
422    - Here, we set all entries for a particular row at once.
423    - Note that MatSetValues() uses 0-based row and column numbers
424      in Fortran as well as in C.
425 */
426 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
427 {
428   AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
429   Vec               local_in = appctx->u_local;   /* local ghosted input vector */
430   DM                da       = appctx->da;        /* distributed array */
431   PetscScalar       v[3],sc;
432   const PetscScalar *localptr;
433   PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;
434 
435   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
436      Get ready for local Jacobian computations
437      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
438   /*
439      Scatter ghost points to local vector, using the 2-step process
440         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
441      By placing code between these two statements, computations can be
442      done while messages are in transition.
443   */
444   PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
445   PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));
446 
447   /*
448      Get pointer to vector data
449   */
450   PetscCall(VecGetArrayRead(local_in,&localptr));
451 
452   /*
453      Get starting and ending locally owned rows of the matrix
454   */
455   PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
456   mstart = mstarts; mend = mends;
457 
458   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
459      Compute entries for the locally owned part of the Jacobian.
460       - Currently, all PETSc parallel matrix formats are partitioned by
461         contiguous chunks of rows across the processors.
462       - Each processor needs to insert only elements that it owns
463         locally (but any non-local elements will be sent to the
464         appropriate processor during matrix assembly).
465       - Here, we set all entries for a particular row at once.
466       - We can set matrix entries either using either
467         MatSetValuesLocal() or MatSetValues().
468      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
469 
470   /*
471      Set matrix rows corresponding to boundary data
472   */
473   if (mstart == 0) {
474     v[0] = 0.0;
475     PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
476     mstart++;
477   }
478   if (mend == appctx->m) {
479     mend--;
480     v[0] = 0.0;
481     PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
482   }
483 
484   /*
485      Set matrix rows corresponding to interior data.  We construct the
486      matrix one row at a time.
487   */
488   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
489   for (i=mstart; i<mend; i++) {
490     idx[0] = i-1; idx[1] = i; idx[2] = i+1;
491     is     = i - mstart + 1;
492     v[0]   = sc*localptr[is];
493     v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
494     v[2]   = sc*localptr[is];
495     PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
496   }
497 
498   /*
499      Restore vector
500   */
501   PetscCall(VecRestoreArrayRead(local_in,&localptr));
502 
503   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
504      Complete the matrix assembly process and set some options
505      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
506   /*
507      Assemble matrix, using the 2-step process:
508        MatAssemblyBegin(), MatAssemblyEnd()
509      Computations can be done while messages are in transition
510      by placing code between these two statements.
511   */
512   PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
513   PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
514   if (BB != AA) {
515     PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
516     PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
517   }
518 
519   /*
520      Set and option to indicate that we will never add a new nonzero location
521      to the matrix. If we do, it will generate an error.
522   */
523   PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
524 
525   return 0;
526 }
527 
528 /*TEST
529 
530     test:
531       requires: !single
532 
533 TEST*/
534