static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/*
  Concepts: TS^time-dependent nonlinear problems
  Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx       appctx;                 /* user-defined application context */
  TS           ts;                     /* timestepping context */
  Mat          A;                      /* Jacobian matrix data structure */
  Vec          u;                      /* approximate solution vector */
  PetscInt     time_steps_max = 100;   /* default max timesteps */
  PetscReal    dt;
  PetscReal    time_total_max = 100.0; /* default max total time */
  PetscOptions options,optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  PetscCall(PetscOptionsCreate(&options));
  PetscCall(PetscOptionsSetValue(options,"-ts_monitor","ascii"));
  PetscCall(PetscOptionsSetValue(options,"-snes_monitor","ascii"));
  PetscCall(PetscOptionsSetValue(options,"-ksp_monitor","ascii"));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL));

  appctx.h = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
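  /*
     The private options database created above (containing -ts_monitor,
     -snes_monitor and -ksp_monitor) is attached to each object below with
     PetscObjectSetOptions() before that object's *SetFromOptions() call,
     so the objects take their options from that database rather than from
     the global one; PetscObjectGetOptions() near the end of main() checks
     that the same database is returned.
  */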
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(PetscObjectSetOptions((PetscObject)appctx.da,options));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(PetscObjectSetOptions((PetscObject)ts,options));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(PetscObjectSetOptions((PetscObject)A,options));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));
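  /*
     ExactSolution() below is not exercised by this test. A minimal,
     hypothetical check of the computed result against the exact solution
     could be added here after TSSolve(), for example:

        PetscReal ftime,nrm;
        PetscCall(TSGetSolveTime(ts,&ftime));
        PetscCall(ExactSolution(ftime,appctx.solution,&appctx));
        PetscCall(VecAXPY(appctx.solution,-1.0,u));   error = exact - computed
        PetscCall(VecNorm(appctx.solution,NORM_2,&nrm));
  */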
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscObjectGetOptions((PetscObject)ts,&optionscopy));
  PetscCheck(options == optionscopy,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"PetscObjectGetOptions() failed");

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array. Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
       - Note that the Fortran interface to VecGetArray() differs from the
         C version. See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE. This routine is set in the main program by
   calling TSSetRHSFunction(). We compute:
          global_out = F(global_in)

   Input Parameters:
   ts - timestepping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0] = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
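  /*
     For an interior node i the loop below evaluates the right-hand side
        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     i.e. u*u_xx/(2*(t+1)^2) with the standard second-order centered
     difference approximation of u_xx; the factor sc computed above
     collects 1/(2*h^2*(1+t)^2).
  */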
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local; /* local ghosted input vector */
  DM                da       = appctx->da;      /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
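  /*
     The interior rows set below are the derivatives of
        F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i)
     (see RHSFunction() above) with respect to u_{i-1}, u_i and u_{i+1}:
        dF_i/du_{i-1} = sc*u_i
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc*u_i
     The boundary rows are zero because the boundary values of F are the
     constants 1 and 2, independent of u.
  */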
  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      requires: !single

TEST*/