static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping. Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
     u_t = ---------------
              2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
     u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
     u(0,x) = 1 + x*x.

   The exact solution is:
     u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

   ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines (RHSFunction, RHSJacobian,
   Monitor).  A pointer to one instance of this struct is passed as the
   opaque ctx argument to each callback.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx    appctx;                 /* user-defined application context */
  TS        ts;                     /* timestepping context */
  Mat       A;                      /* Jacobian matrix data structure */
  Vec       u;                      /* approximate solution vector */
  PetscInt  time_steps_max = 100;   /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor = PETSC_FALSE;
  PetscReal bounds[] = {1.0, 3.3};  /* y-axis bounds for the draw viewer */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;   /* default grid size; overridable with -M */

  PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));

  appctx.h = 1.0/(appctx.m-1.0);   /* uniform mesh width on [0,1] */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));   /* must come last so runtime options win */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time,
   u(0,x) = 1 + x*x.

   Input Parameters:
   u      - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;              /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution u(t,x) = (1+t)*(1+x*x)
   at a given time.

   Input Parameters:
   t        - current time
   solution - vector in which exact solution will be computed
   appctx   - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts   - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u    - the solution at this timestep
   ctx  - the user-provided context for this monitoring routine.
          In this case we use the application context which contains
          information about the problem size, workspace and the exact
          solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error.
     Note: this overwrites appctx->solution with the error vector;
     the exact solution is recomputed on the next call.
  */
  PetscCall(VecAXPY(appctx->solution,-1.0,u));
  PetscCall(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2;   /* scale the 2-norm by the grid spacing */
  PetscCall(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm,"Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().
   We compute:
          global_out = F(global_in)

   Input Parameters:
   ts        - timestepping context
   t         - current time
   global_in - vector containing the current iterate
   ctx       - (optional) user-provided context for function evaluation.
               In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx = (AppCtx*) ctx;       /* user-defined application context */
  DM                da = appctx->da;               /* distributed array */
  Vec               local_in = appctx->u_local;    /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  /* scale factor 1/(2 h^2 (1+t)^2) from the PDE u_t = u*u_xx / (2(t+1)^2) */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0]           = 1.0;   /* left boundary: du/dt = 1 */
  if (rank == size-1) copyptr[localsize-1] = 2.0;   /* right boundary: du/dt = 2 */

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
    PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts        - the TS context
   t         - current time
   global_in - global input vector
   ctx       - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA  - Jacobian matrix
   BB  - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx = (AppCtx*)ctx;     /* user-defined application context */
  Vec               local_in = appctx->u_local; /* local ghosted input vector */
  DM                da = appctx->da;            /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: since the boundary
     time-derivatives are constants, the corresponding Jacobian rows are
     zero.  mstart/mend are then shrunk so the interior loop below skips
     the boundary rows.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.  For F_i = sc*u_i*(u_{i+1}+u_{i-1}-2u_i),
     the row of the Jacobian is
       dF_i/du_{i-1} = sc*u_i
       dF_i/du_i     = sc*(u_{i+1}+u_{i-1}-4u_i)
       dF_i/du_{i+1} = sc*u_i
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;   /* index into the ghosted local array */
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

   test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

   test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

   test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

   test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/