static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -nonlinear : Treat the problem as nonlinear, using RHSFunctionHeat()\n\
  -debug : Activate debugging printouts\n\
  -nox : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2*u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */
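/*
   Where the exact solution comes from: substituting a single Fourier mode
       u_k(x,t) = exp(-k*k*pi*pi*t) * sin(k*pi*x)
   into u_t = u_xx shows that both sides equal -k^2*pi^2*u_k, so each mode
   satisfies the PDE (and the homogeneous boundary conditions) while
   decaying at the rate k^2*pi^2. By linearity, the k = 6 and k = 2 modes
   of the initial condition evolve independently, giving u_exact above.
*/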
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid. Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscksp.h    - Krylov subspace methods and linear solvers
     petscviewer.h - viewers                  petscpc.h  - preconditioners
     petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;            /* communicator */
  DM          da;              /* distributed array data structure */
  Vec         localwork;       /* local ghosted work vector */
  Vec         u_local;         /* local ghosted approximate solution vector */
  Vec         solution;        /* global exact solution vector */
  PetscInt    m;               /* total number of grid points */
  PetscReal   h;               /* mesh width h = 1/(m-1) */
  PetscBool   debug;           /* PETSC_TRUE activates debugging printouts */
  PetscViewer viewer1,viewer2; /* viewers for the solution and error */
  PetscReal   norm_2,norm_max; /* accumulated error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx        appctx;               /* user-defined application context */
  TS            ts;                   /* timestepping context */
  Mat           A;                    /* matrix data structure */
  Vec           u;                    /* approximate solution vector */
  PetscReal     time_total_max = 1.0; /* default max total time */
  PetscInt      time_steps_max = 100; /* default max timesteps */
  PetscDraw     draw;                 /* drawing context */
  PetscInt      steps,m;
  PetscMPIInt   size;
  PetscReal     dt,ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are m
     total grid values spread equally among all the processors.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));
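  /*
     Note on global vs. local vectors: the global vector u holds only the
     entries owned by this process, while a DMDA local vector such as
     u_local also holds ghost copies of the neighboring processes' boundary
     entries, so a 3-point stencil can be applied without further
     communication once DMGlobalToLocalBegin/End() has filled the ghosts.
  */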
  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer1,0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer2,0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-nonlinear",&flg,NULL));
  tsproblem = flg ? TS_NONLINEAR : TS_LINEAR; /* remember the choice; it selects the RHS setup below */
  PetscCall(TSSetProblemType(ts,tsproblem));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-time_dependent_rhs",&flg,NULL));
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand side
       as a time-dependent matrix.
    */
    PetscCall(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
    PetscCall(TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx));
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand side
       as a matrix only once, and then sets the special Jacobian evaluation
       routine TSComputeRHSJacobianConstant().
    */
    PetscCall(RHSMatrixHeat(ts,0.0,u,A,A,&appctx));
    PetscCall(TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx));
    PetscCall(TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx));
  }

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    PetscCall(TSSetRHSFunction(ts,NULL,RHSFunctionHeat,&appctx));
    PetscCall(TSGetSNES(ts,&snes));
    PetscCall(SNESSetJacobian(snes,NULL,NULL,SNESComputeJacobianDefault,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h*appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));
  PetscCall(TSSetSolution(ts,u));
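  /*
     The initial step dt = h^2/2 is the classical stability limit for
     forward Euler applied to u_t = u_xx on a grid with spacing h, so it is
     a safe default for explicit methods; implicit methods selected at
     runtime with -ts_type (e.g., beuler or cn) can take much larger steps.
  */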
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info.
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     overrides the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));
  PetscCall(TSGetSolveTime(ts,&ftime));
  PetscCall(TSGetStepNumber(ts,&steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Total timesteps %" PetscInt_FMT ", Final time %g\n",steps,(double)ftime));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Avg. error (2 norm) = %g  Avg. error (max norm) = %g\n",(double)(appctx.norm_2/steps),(double)(appctx.norm_max/steps)));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&u));
  PetscCall(PetscViewerDestroy(&appctx.viewer1));
  PetscCall(PetscViewerDestroy(&appctx.viewer2));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(DMDestroy(&appctx.da));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI,
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array. Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version. See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
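  /*
     Indexing note: i runs over the global grid indices owned by this
     process, while the array returned by VecGetArray() is addressed from
     0 on each process, hence the shift u_localptr[i-mybase].
  */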
  for (i=mybase; i<myend; i++) u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = PetscExpReal(-36.*PETSC_PI*PETSC_PI*t); ex2 = PetscExpReal(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                          sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
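/*
   A note on the error norms computed in Monitor() below: the vector 2-norm
   is scaled by sqrt(h) so that it approximates the continuous L2 norm,
       ||e||_L2 ~ sqrt(h * sum_i e_i^2) = sqrt(h) * ||e||_2,
   which makes the reported error roughly independent of the grid size m.
*/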
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep. This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u - the solution at this timestep
   ctx - the user-provided context for this monitoring routine.
         In this case we use the application context which contains
         information about the problem size, workspace and the exact
         solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscReal norm_2,norm_max;

  PetscFunctionBeginUser;
  /*
     View a graph of the current iterate
  */
  PetscCall(VecView(u,appctx->viewer2));

  /*
     Compute the exact solution
  */
  PetscCall(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution,-1.0,u));
  PetscCall(VecNorm(appctx->solution,NORM_2,&norm_2));
  norm_2 = PetscSqrtReal(appctx->h)*norm_2;
  PetscCall(VecNorm(appctx->solution,NORM_MAX,&norm_max));
  if (norm_2 < 1e-14) norm_2 = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm,"Timestep %" PetscInt_FMT ": time = %g  2-norm error = %g  max norm error = %g\n",step,(double)time,(double)norm_2,(double)norm_max));
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  PetscCall(VecView(appctx->solution,appctx->viewer1));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(0);
}
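/*
   For reference, the matrix assembled by RHSMatrixHeat() below is the
   standard 3-point finite-difference Laplacian with identity rows for the
   Dirichlet boundary points. For example, with m = 5 and s = 1/h^2:

       [ 1    0    0    0    0 ]
       [ s  -2s    s    0    0 ]
       [ 0    s  -2s    s    0 ]
       [ 0    0    s  -2s    s ]
       [ 0    0    0    0    1 ]
*/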
/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context
   t - current time
   X - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat AA,Mat BB,void *ctx)
{
  Mat         A      = AA;           /* Jacobian matrix */
  AppCtx     *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatGetOwnershipRange(A,&mstart,&mend));

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) { /* first processor only */
    v[0] = 1.0;
    PetscCall(MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    PetscCall(MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    PetscCall(MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  PetscFunctionReturn(0);
}

/*
   RHSFunctionHeat - User-provided routine used in the nonlinear
   formulation; it evaluates the right-hand side f(u) = A*u by
   (re)assembling the finite-difference matrix and applying it to the
   input vector.
*/
PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  Mat A;

  PetscFunctionBeginUser;
  PetscCall(TSGetRHSJacobian(ts,&A,NULL,NULL,&ctx));
  PetscCall(RHSMatrixHeat(ts,t,globalin,A,NULL,ctx));
  /* PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
  PetscCall(MatMult(A,globalin,globalout));
  PetscFunctionReturn(0);
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/