static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping. Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;
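
/*
   For reference, the spatial discretization implemented by RHSFunction()
   and RHSJacobian() below is the standard method of lines on the uniform
   grid x_i = i*h, i = 0,...,m-1, h = 1/(m-1):

        du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*(t+1)^2*h^2),   0 < i < m-1,

   together with the time-differentiated boundary conditions

        du_0/dt = 1,   du_{m-1}/dt = 2.
*/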

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx    appctx;                 /* user-defined application context */
  TS        ts;                     /* timestepping context */
  Mat       A;                      /* Jacobian matrix data structure */
  Vec       u;                      /* approximate solution vector */
  PetscInt  time_steps_max = 100;   /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));

  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));
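
  /*
     Note: with the backward Euler method selected below, each timestep
     requires the (approximate) solution of the nonlinear system
          u^{n+1} - dt * F(t^{n+1}, u^{n+1}) = u^n,
     which TS delegates to SNES (and, within Newton's method, to KSP).
     This is why a Jacobian evaluation routine is supplied in addition to
     the right-hand-side function.
  */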

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
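/*
   Example usage (assuming the executable is built as ex3): run on two MPI
   ranks with timestep monitoring and the user-defined error monitor via

       mpiexec -n 2 ./ex3 -ts_max_steps 10 -ts_monitor -mymonitor

   See the help string at the top of this file and the TEST block at the
   end for further runtime options.
*/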
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array. Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version. See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep. This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u - the solution at this timestep
   ctx - the user-provided context for this monitoring routine.
         In this case we use the application context which contains
         information about the problem size, workspace and the exact
         solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;
  PetscDraw draw;

  /*
     We use the default X Windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by sqrt(h) so it approximates the continuous L2 norm of the error */
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE. This routine is set in the main program by
   calling TSSetRHSFunction(). We compute:
         global_out = F(global_in)

   Input Parameters:
   ts - timestepping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
    PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
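/*
   Note: because each implicit step is solved with SNES, the hand-coded
   Jacobian below can be compared against a finite-difference approximation
   at runtime with the option -snes_test_jacobian (available in recent
   PETSc releases).
*/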
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and column numbers of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data. These rows are zero
     because the boundary values of the right-hand-side function (u_t = 1
     and u_t = 2) are constants that do not depend on u.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
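  /*
     The interior stencil entries follow from differentiating
          F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),  sc = 1/(2*h^2*(1+t)^2):
          dF_i/du_{i-1} = sc * u_i                          -> v[0]
          dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)  -> v[1]
          dF_i/du_{i+1} = sc * u_i                          -> v[2]
  */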
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
        MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/