static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
     u_t = ---------------
            2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1, u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines (RHSFunction and RHSJacobian).
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx       appctx;                 /* user-defined application context */
  TS           ts;                     /* timestepping context */
  Mat          A;                      /* Jacobian matrix data structure */
  Vec          u;                      /* approximate solution vector */
  PetscInt     time_steps_max = 100;   /* default max timesteps */
  PetscReal    dt;
  PetscReal    time_total_max = 100.0; /* default max total time */
  PetscOptions options, optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /*
     Create a private options database (instead of using the global one)
     and attach it to each PETSc object below with PetscObjectSetOptions();
     this is the feature being tested by this example.
  */
  PetscCall(PetscOptionsCreate(&options));
  PetscCall(PetscOptionsSetValue(options, "-ts_monitor", "ascii"));
  PetscCall(PetscOptionsSetValue(options, "-snes_monitor", "ascii"));
  PetscCall(PetscOptionsSetValue(options, "-ksp_monitor", "ascii"));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(options, NULL, "-M", &appctx.m, NULL));

  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(PetscObjectSetOptions((PetscObject)appctx.da, options));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(PetscObjectSetOptions((PetscObject)ts, options));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscObjectSetOptions((PetscObject)A, options));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Verify that the options database attached with PetscObjectSetOptions()
     is the one returned by PetscObjectGetOptions() - the point of this test.
  */
  PetscCall(PetscObjectGetOptions((PetscObject)ts, &optionscopy));
  PetscCheck(options == optionscopy, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "PetscObjectGetOptions() failed");

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(PetscOptionsDestroy(&options));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;      /* u(0,x) = 1 + x^2 */
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution u(t,x) = (1+t)*(1+x^2)
   at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().
We compute:
          global_out = F(global_in)

   Input Parameters:
   ts - timestepping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  /* scaling factor 1/(2*h^2*(t+1)^2) from the PDE u_t = u*u_xx/(2*(t+1)^2) */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g.  Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;                  /* left physical boundary lives on rank 0 */
  if (rank == size - 1) copyptr[localsize - 1] = 2.0; /* right physical boundary lives on the last rank */

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: the boundary values
     are prescribed, so those rows of the Jacobian are zero.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++; /* skip the boundary row in the interior loop below */
  }
  if (mend == appctx->m) {
    mend--; /* skip the boundary row in the interior loop below */
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.  For F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i):
       dF_i/du_{i-1} = sc*u_i
       dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
       dF_i/du_{i+1} = sc*u_i
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1; /* index into the local (ghosted) array */
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      requires: !single

TEST*/