static char help[] ="Tests PetscObjectSetOptions() for TS object\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

                 u * u_xx
         u_t = ------------
                2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx       appctx;                 /* user-defined application context */
  TS           ts;                     /* timestepping context */
  Mat          A;                      /* Jacobian matrix data structure */
  Vec          u;                      /* approximate solution vector */
  PetscInt     time_steps_max = 100;   /* default max timesteps */
  PetscReal    dt;
  PetscReal    time_total_max = 100.0; /* default max total time */
  PetscOptions options,optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  PetscCall(PetscOptionsCreate(&options));
  PetscCall(PetscOptionsSetValue(options,"-ts_monitor","ascii"));
  PetscCall(PetscOptionsSetValue(options,"-snes_monitor","ascii"));
  PetscCall(PetscOptionsSetValue(options,"-ksp_monitor","ascii"));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL));

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
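     Note (added comment): PetscObjectSetOptions() is called on the DMDA
     below with our private options database, so DMSetFromOptions() consults
     that database rather than the global one for any DMDA-related options.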
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(PetscObjectSetOptions((PetscObject)appctx.da,options));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(PetscObjectSetOptions((PetscObject)ts,options));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(PetscObjectSetOptions((PetscObject)A,options));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
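     Added note: before destroying the TS, PetscObjectGetOptions() is used
     below to verify that the TS still references the options database
     attached earlier with PetscObjectSetOptions(); that check is the point
     of this test.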
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscObjectGetOptions((PetscObject)ts,&optionscopy));
  PetscCheck(options == optionscopy,PETSC_COMM_WORLD,PETSC_ERR_PLIB,"PetscObjectGetOptions() failed");

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
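     Added note: the exact solution is u(t,x) = (t + 1)*(1 + x*x); at t = 0
     this reduces to the initial condition 1 + x*x set in InitialConditions().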
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
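     Added note: with the centered difference
        u_xx(x_i) ~ (u_{i+1} - 2*u_i + u_{i-1})/h^2,
     the right-hand side at an interior node is
        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(t+1)^2),
     which is what the loop below computes with sc = 1/(2*h^2*(1+t)^2).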
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local; /* local ghosted input vector */
  DM                da       = appctx->da;      /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
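      - Added note: for an interior row i the nonzero entries are
           dF_i/du_{i-1} = sc*u_i,
           dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
           dF_i/du_{i+1} = sc*u_i,
        and the boundary rows are zero because u_t at the boundaries does
        not depend on u.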
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      requires: !single

TEST*/