static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping. Runtime options include:\n\
-M <xg>, where <xg> = number of grid points\n\
-debug : Activate debugging printouts\n\
-nox : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
       u_t = ---------
             2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
       u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
       u(0,x) = 1 + x*x.

   The exact solution is:
       u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines (RHSFunction, RHSJacobian,
   Monitor, InitialConditions, ExactSolution below).
*/
typedef struct {
  MPI_Comm  comm;      /* communicator shared by all objects in this run */
  DM        da;        /* distributed array managing the 1d parallel grid */
  Vec       localwork; /* local ghosted work vector (RHS evaluation scratch) */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector (reused as error vector in Monitor) */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
61 */ 62 extern PetscErrorCode InitialConditions(Vec, AppCtx *); 63 extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *); 64 extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *); 65 extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *); 66 extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *); 67 68 int main(int argc, char **argv) { 69 AppCtx appctx; /* user-defined application context */ 70 TS ts; /* timestepping context */ 71 Mat A; /* Jacobian matrix data structure */ 72 Vec u; /* approximate solution vector */ 73 PetscInt time_steps_max = 100; /* default max timesteps */ 74 PetscReal dt; 75 PetscReal time_total_max = 100.0; /* default max total time */ 76 PetscBool mymonitor = PETSC_FALSE; 77 PetscReal bounds[] = {1.0, 3.3}; 78 79 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 80 Initialize program and set problem parameters 81 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 82 83 PetscFunctionBeginUser; 84 PetscCall(PetscInitialize(&argc, &argv, (char *)0, help)); 85 PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds)); 86 87 appctx.comm = PETSC_COMM_WORLD; 88 appctx.m = 60; 89 90 PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL)); 91 PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug)); 92 PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor)); 93 94 appctx.h = 1.0 / (appctx.m - 1.0); 95 96 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 97 Create vector data structures 98 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 99 100 /* 101 Create distributed array (DMDA) to manage parallel grid and vectors 102 and to set up the ghost point communication pattern. There are M 103 total grid values spread equally among all the processors. 
104 */ 105 PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da)); 106 PetscCall(DMSetFromOptions(appctx.da)); 107 PetscCall(DMSetUp(appctx.da)); 108 109 /* 110 Extract global and local vectors from DMDA; we use these to store the 111 approximate solution. Then duplicate these for remaining vectors that 112 have the same types. 113 */ 114 PetscCall(DMCreateGlobalVector(appctx.da, &u)); 115 PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local)); 116 117 /* 118 Create local work vector for use in evaluating right-hand-side function; 119 create global work vector for storing exact solution. 120 */ 121 PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork)); 122 PetscCall(VecDuplicate(u, &appctx.solution)); 123 124 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 125 Create timestepping solver context; set callback routine for 126 right-hand-side function evaluation. 127 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 128 129 PetscCall(TSCreate(PETSC_COMM_WORLD, &ts)); 130 PetscCall(TSSetProblemType(ts, TS_NONLINEAR)); 131 PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx)); 132 133 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 134 Set optional user-defined monitoring routine 135 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 136 137 if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL)); 138 139 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 140 For nonlinear problems, the user can provide a Jacobian evaluation 141 routine (or use a finite differencing approximation). 142 143 Create matrix data structure; set Jacobian evaluation routine. 
144 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 145 146 PetscCall(MatCreate(PETSC_COMM_WORLD, &A)); 147 PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m)); 148 PetscCall(MatSetFromOptions(A)); 149 PetscCall(MatSetUp(A)); 150 PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx)); 151 152 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 153 Set solution vector and initial timestep 154 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 155 156 dt = appctx.h / 2.0; 157 PetscCall(TSSetTimeStep(ts, dt)); 158 159 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 160 Customize timestepping solver: 161 - Set the solution method to be the Backward Euler method. 162 - Set timestepping duration info 163 Then set runtime options, which can override these defaults. 164 For example, 165 -ts_max_steps <maxsteps> -ts_max_time <maxtime> 166 to override the defaults set by TSSetMaxSteps()/TSSetMaxTime(). 167 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 168 169 PetscCall(TSSetType(ts, TSBEULER)); 170 PetscCall(TSSetMaxSteps(ts, time_steps_max)); 171 PetscCall(TSSetMaxTime(ts, time_total_max)); 172 PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER)); 173 PetscCall(TSSetFromOptions(ts)); 174 175 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 176 Solve the problem 177 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 178 179 /* 180 Evaluate initial conditions 181 */ 182 PetscCall(InitialConditions(u, &appctx)); 183 184 /* 185 Run the timestepping solver 186 */ 187 PetscCall(TSSolve(ts, u)); 188 189 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 190 Free work space. All PETSc objects should be destroyed when they 191 are no longer needed. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time,
   u(0,x) = 1 + x*x.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx) {
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt i, mybase, myend;

  /*
     Determine the starting and ending points of this processor's
     range of grid values (ownership range of the global vector).
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
       - Note that the Fortran interface to VecGetArray() differs from the
         C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
247 */ 248 for (i = mybase; i < myend; i++) { 249 x = h * (PetscReal)i; /* current location in global grid */ 250 u_localptr[i - mybase] = 1.0 + x * x; 251 } 252 253 /* 254 Restore vector 255 */ 256 PetscCall(VecRestoreArray(u, &u_localptr)); 257 258 /* 259 Print debugging information if desired 260 */ 261 if (appctx->debug) { 262 PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n")); 263 PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD)); 264 } 265 266 return 0; 267 } 268 /* --------------------------------------------------------------------- */ 269 /* 270 ExactSolution - Computes the exact solution at a given time. 271 272 Input Parameters: 273 t - current time 274 solution - vector in which exact solution will be computed 275 appctx - user-defined application context 276 277 Output Parameter: 278 solution - vector with the newly computed exact solution 279 */ 280 PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx) { 281 PetscScalar *s_localptr, h = appctx->h, x; 282 PetscInt i, mybase, myend; 283 284 /* 285 Determine starting and ending points of each processor's 286 range of grid values 287 */ 288 PetscCall(VecGetOwnershipRange(solution, &mybase, &myend)); 289 290 /* 291 Get a pointer to vector data. 292 */ 293 PetscCall(VecGetArray(solution, &s_localptr)); 294 295 /* 296 Simply write the solution directly into the array locations. 297 Alternatively, we could use VecSetValues() or VecSetValuesLocal(). 298 */ 299 for (i = mybase; i < myend; i++) { 300 x = h * (PetscReal)i; 301 s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x); 302 } 303 304 /* 305 Restore vector 306 */ 307 PetscCall(VecRestoreArray(solution, &s_localptr)); 308 return 0; 309 } 310 /* --------------------------------------------------------------------- */ 311 /* 312 Monitor - User-provided routine to monitor the solution computed at 313 each timestep. This example plots the solution and computes the 314 error in two different norms. 

   Input Parameters:
   ts - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u - the solution at this timestep
   ctx - the user-provided context for this monitoring routine.
         In this case we use the application context which contains
         information about the problem size, workspace and the exact
         solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx) {
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;     /* 2-norm, scaled 2-norm, and max-norm of the error */
  PetscDraw draw;                 /* drawing context for the X-window plot */

  /*
     We use the default X Windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator.  This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes the graphics look better.
*/
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /* Compute the exact solution at this timestep */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error; note that the exact
     solution vector is overwritten here with the error u_exact - u.
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().
We compute:
         global_out = F(global_in)

   Input Parameters:
   ts - timestepping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx) {
  AppCtx *appctx = (AppCtx *)ctx;    /* user-defined application context */
  DM da = appctx->da;                /* distributed array */
  Vec local_in = appctx->u_local;    /* local ghosted input vector */
  Vec localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt i, localsize;             /* loop index; length of the local ghosted vector */
  PetscMPIInt rank, size;            /* this process's rank and the communicator size */
  PetscScalar *copyptr, sc;          /* output array; PDE scaling factor */
  const PetscScalar *localptr;       /* read-only input array */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
       DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
*/
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /* Access directly the values in our local INPUT work array */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /* Access directly the values in our local OUTPUT work array */
  PetscCall(VecGetArray(localwork, &copyptr));

  /* Scaling factor 1/(2 h^2 (t+1)^2), combining the PDE's 1/(2(t+1)^2)
     with the 1/h^2 of the centered second-difference below. */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /* Evaluate our function on the nodes owned by this processor */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
         u(t,boundary) = g(t,boundary)
     for some function g.  Now take the derivative with respect to t to obtain
         u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;                    /* left physical boundary lives on rank 0 */
  if (rank == size - 1) copyptr[localsize - 1] = 2.0; /* right physical boundary lives on the last rank */

  /*
     Handle the interior nodes where the PDE is replaced by the finite
     difference operators.
*/
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /* Restore vectors */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
    PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx) {
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  Vec local_in = appctx->u_local; /* local ghosted input vector */
  DM da = appctx->da;             /* distributed array */
  PetscScalar v[3], sc;           /* one matrix row of values; PDE scaling factor */
  const PetscScalar *localptr;    /* read-only array of the local ghosted input */
  PetscInt i, mstart, mend, mstarts, mends, idx[3], is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
       DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /* Get pointer to vector data */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /* Get starting and ending locally owned rows of the matrix */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: the boundary rows of
     the Jacobian are zero since u_t at the boundary does not depend on u.
  */
  if (mstart == 0) { /* this rank owns the left physical boundary row */
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++; /* skip the boundary row in the interior loop below */
  }
  if (mend == appctx->m) { /* this rank owns the right physical boundary row */
    mend--; /* skip the boundary row in the interior loop below */
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    /* Local (ghosted) index of u_i: the +1 appears to offset the left ghost
       point (or, on rank 0, the skipped boundary row) -- NOTE(review): verify
       against the DMDA ghosting layout. */
    is = i - mstart + 1;
    v[0] = sc * localptr[is];
    v[1] = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2] = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /* Restore vector */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
*/
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  return 0;
}

/*TEST

   test:
     args: -nox -ts_dt 10 -mymonitor
     nsize: 2
     requires: !single

   test:
     suffix: tut_1
     nsize: 1
     args: -ts_max_steps 10 -ts_monitor

   test:
     suffix: tut_2
     nsize: 4
     args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

   test:
     suffix: tut_3
     nsize: 4
     args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/