xref: /petsc/src/ts/tests/ex12.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 
2 static char help[] ="Tests PetscObjectSetOptions() for TS object\n\n";
3 
4 /*
5    Concepts: TS^time-dependent nonlinear problems
6    Processors: n
7 */
8 
9 /* ------------------------------------------------------------------------
10 
11    This program solves the PDE
12 
13                u * u_xx
14          u_t = ---------
15                2*(t+1)^2
16 
17     on the domain 0 <= x <= 1, with boundary conditions
18          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
19     and initial condition
20          u(0,x) = 1 + x*x.
21 
22     The exact solution is:
23          u(t,x) = (1 + x*x) * (1 + t)
24 
25     Note that since the solution is linear in time and quadratic in x,
26     the finite difference scheme actually computes the "exact" solution.
27 
28     We use by default the backward Euler method.
29 
30   ------------------------------------------------------------------------- */
31 
32 /*
33    Include "petscts.h" to use the PETSc timestepping routines. Note that
34    this file automatically includes "petscsys.h" and other lower-level
35    PETSc include files.
36 
37    Include the "petscdmda.h" to allow us to use the distributed array data
38    structures to manage the parallel grid.
39 */
40 #include <petscts.h>
41 #include <petscdm.h>
42 #include <petscdmda.h>
43 #include <petscdraw.h>
44 
45 /*
46    User-defined application context - contains data needed by the
47    application-provided callback routines.
48 */
49 typedef struct {
50   MPI_Comm  comm;           /* communicator */
51   DM        da;             /* distributed array data structure */
52   Vec       localwork;      /* local ghosted work vector */
53   Vec       u_local;        /* local ghosted approximate solution vector */
54   Vec       solution;       /* global exact solution vector */
55   PetscInt  m;              /* total number of grid points */
56   PetscReal h;              /* mesh width: h = 1/(m-1) */
57 } AppCtx;
58 
59 /*
60    User-defined routines, provided below.
61 */
62 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
63 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
64 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
65 extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
66 
int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;  /* default max timesteps */
  PetscErrorCode ierr;
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscOptions   options,optionscopy;    /* private options database and the copy read back for verification */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /*
     This test exercises PetscObjectSetOptions(): instead of the global
     options database, a private database is created, preloaded with
     monitoring options, and attached to each solver object below.
  */
  CHKERRQ(PetscOptionsCreate(&options));
  CHKERRQ(PetscOptionsSetValue(options,"-ts_monitor","ascii"));
  CHKERRQ(PetscOptionsSetValue(options,"-snes_monitor","ascii"));
  CHKERRQ(PetscOptionsSetValue(options,"-ksp_monitor","ascii"));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  /* -M is looked up in the private database, not the command line */
  CHKERRQ(PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL));

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  CHKERRQ(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  CHKERRQ(PetscObjectSetOptions((PetscObject)appctx.da,options));
  CHKERRQ(DMSetFromOptions(appctx.da));
  CHKERRQ(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  CHKERRQ(DMCreateGlobalVector(appctx.da,&u));
  CHKERRQ(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  CHKERRQ(VecDuplicate(appctx.u_local,&appctx.localwork));
  CHKERRQ(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSCreate(PETSC_COMM_WORLD,&ts));
  CHKERRQ(PetscObjectSetOptions((PetscObject)ts,options));
  CHKERRQ(TSSetProblemType(ts,TS_NONLINEAR));
  CHKERRQ(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
  CHKERRQ(PetscObjectSetOptions((PetscObject)A,options));
  CHKERRQ(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  CHKERRQ(MatSetFromOptions(A));
  CHKERRQ(MatSetUp(A));
  CHKERRQ(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  CHKERRQ(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     Note that TSSetFromOptions() reads the private database attached
     above, not the global one.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  CHKERRQ(TSSetType(ts,TSBEULER));
  CHKERRQ(TSSetMaxSteps(ts,time_steps_max));
  CHKERRQ(TSSetMaxTime(ts,time_total_max));
  CHKERRQ(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  CHKERRQ(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  CHKERRQ(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  CHKERRQ(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Verify that PetscObjectGetOptions() returns exactly the database
     that was attached with PetscObjectSetOptions() -- the point of this test.
  */
  CHKERRQ(PetscObjectGetOptions((PetscObject)ts,&optionscopy));
  PetscCheck(options == optionscopy,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"PetscObjectGetOptions() failed");

  CHKERRQ(TSDestroy(&ts));
  CHKERRQ(VecDestroy(&u));
  CHKERRQ(MatDestroy(&A));
  CHKERRQ(DMDestroy(&appctx.da));
  CHKERRQ(VecDestroy(&appctx.localwork));
  CHKERRQ(VecDestroy(&appctx.solution));
  CHKERRQ(VecDestroy(&appctx.u_local));
  CHKERRQ(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
213 /* --------------------------------------------------------------------- */
214 /*
215    InitialConditions - Computes the solution at the initial time.
216 
217    Input Parameters:
218    u - uninitialized solution vector (global)
219    appctx - user-defined application context
220 
221    Output Parameter:
222    u - vector with solution at initial time (global)
223 */
224 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
225 {
226   PetscScalar    *u_localptr,h = appctx->h,x;
227   PetscInt       i,mybase,myend;
228 
229   /*
230      Determine starting point of each processor's range of
231      grid values.
232   */
233   CHKERRQ(VecGetOwnershipRange(u,&mybase,&myend));
234 
235   /*
236     Get a pointer to vector data.
237     - For default PETSc vectors, VecGetArray() returns a pointer to
238       the data array.  Otherwise, the routine is implementation dependent.
239     - You MUST call VecRestoreArray() when you no longer need access to
240       the array.
241     - Note that the Fortran interface to VecGetArray() differs from the
242       C version.  See the users manual for details.
243   */
244   CHKERRQ(VecGetArray(u,&u_localptr));
245 
246   /*
247      We initialize the solution array by simply writing the solution
248      directly into the array locations.  Alternatively, we could use
249      VecSetValues() or VecSetValuesLocal().
250   */
251   for (i=mybase; i<myend; i++) {
252     x = h*(PetscReal)i; /* current location in global grid */
253     u_localptr[i-mybase] = 1.0 + x*x;
254   }
255 
256   /*
257      Restore vector
258   */
259   CHKERRQ(VecRestoreArray(u,&u_localptr));
260 
261   return 0;
262 }
263 /* --------------------------------------------------------------------- */
264 /*
265    ExactSolution - Computes the exact solution at a given time.
266 
267    Input Parameters:
268    t - current time
269    solution - vector in which exact solution will be computed
270    appctx - user-defined application context
271 
272    Output Parameter:
273    solution - vector with the newly computed exact solution
274 */
275 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
276 {
277   PetscScalar    *s_localptr,h = appctx->h,x;
278   PetscInt       i,mybase,myend;
279 
280   /*
281      Determine starting and ending points of each processor's
282      range of grid values
283   */
284   CHKERRQ(VecGetOwnershipRange(solution,&mybase,&myend));
285 
286   /*
287      Get a pointer to vector data.
288   */
289   CHKERRQ(VecGetArray(solution,&s_localptr));
290 
291   /*
292      Simply write the solution directly into the array locations.
293      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
294   */
295   for (i=mybase; i<myend; i++) {
296     x = h*(PetscReal)i;
297     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
298   }
299 
300   /*
301      Restore vector
302   */
303   CHKERRQ(VecRestoreArray(solution,&s_localptr));
304   return 0;
305 }
306 /* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
323 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
324 {
325   AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
326   DM                da        = appctx->da;        /* distributed array */
327   Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
328   Vec               localwork = appctx->localwork; /* local ghosted work vector */
329   PetscInt          i,localsize;
330   PetscMPIInt       rank,size;
331   PetscScalar       *copyptr,sc;
332   const PetscScalar *localptr;
333 
334   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
335      Get ready for local function computations
336      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
337   /*
338      Scatter ghost points to local vector, using the 2-step process
339         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
340      By placing code between these two statements, computations can be
341      done while messages are in transition.
342   */
343   CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
344   CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));
345 
346   /*
347       Access directly the values in our local INPUT work array
348   */
349   CHKERRQ(VecGetArrayRead(local_in,&localptr));
350 
351   /*
352       Access directly the values in our local OUTPUT work array
353   */
354   CHKERRQ(VecGetArray(localwork,&copyptr));
355 
356   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
357 
358   /*
359       Evaluate our function on the nodes owned by this processor
360   */
361   CHKERRQ(VecGetLocalSize(local_in,&localsize));
362 
363   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
364      Compute entries for the locally owned part
365      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
366 
367   /*
368      Handle boundary conditions: This is done by using the boundary condition
369         u(t,boundary) = g(t,boundary)
370      for some function g. Now take the derivative with respect to t to obtain
371         u_{t}(t,boundary) = g_{t}(t,boundary)
372 
373      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
374              and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
375   */
376   CHKERRMPI(MPI_Comm_rank(appctx->comm,&rank));
377   CHKERRMPI(MPI_Comm_size(appctx->comm,&size));
378   if (rank == 0)          copyptr[0]           = 1.0;
379   if (rank == size-1) copyptr[localsize-1] = 2.0;
380 
381   /*
382      Handle the interior nodes where the PDE is replace by finite
383      difference operators.
384   */
385   for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
386 
387   /*
388      Restore vectors
389   */
390   CHKERRQ(VecRestoreArrayRead(local_in,&localptr));
391   CHKERRQ(VecRestoreArray(localwork,&copyptr));
392 
393   /*
394      Insert values from the local OUTPUT vector into the global
395      output vector
396   */
397   CHKERRQ(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
398   CHKERRQ(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));
399 
400   return 0;
401 }
402 /* --------------------------------------------------------------------- */
403 /*
404    RHSJacobian - User-provided routine to compute the Jacobian of
405    the nonlinear right-hand-side function of the ODE.
406 
   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix used to construct the preconditioner
417 
418   Notes:
419   RHSJacobian computes entries for the locally owned part of the Jacobian.
420    - Currently, all PETSc parallel matrix formats are partitioned by
421      contiguous chunks of rows across the processors.
422    - Each processor needs to insert only elements that it owns
423      locally (but any non-local elements will be sent to the
424      appropriate processor during matrix assembly).
425    - Always specify global row and columns of matrix entries when
426      using MatSetValues().
427    - Here, we set all entries for a particular row at once.
428    - Note that MatSetValues() uses 0-based row and column numbers
429      in Fortran as well as in C.
430 */
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;                      /* one row of stencil values; common scale factor */
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  CHKERRQ(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  CHKERRQ(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data (read-only; restored below)
  */
  CHKERRQ(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  CHKERRQ(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: the boundary values
     are prescribed, so their Jacobian rows have a zero diagonal; mstart
     and mend are shrunk so the interior loop below skips these rows.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    CHKERRQ(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    CHKERRQ(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.  For F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2 u_i)
     the row of the Jacobian is
        dF_i/du_{i-1} = sc*u_i,
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4 u_i),
        dF_i/du_{i+1} = sc*u_i.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    /* is maps global row i to the index in the local ghosted array:
       on interior ranks the array starts one ghost point before mstarts;
       on rank 0 there is no left ghost, but mstart was incremented past
       the boundary row above, so the same formula applies */
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    CHKERRQ(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  CHKERRQ(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  CHKERRQ(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  CHKERRQ(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    CHKERRQ(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    CHKERRQ(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  CHKERRQ(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}
532 
533 /*TEST
534 
535     test:
536       requires: !single
537 
538 TEST*/
539