static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping. Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\
  -ul    : lower bound\n\
  -uh    : upper bound\n\n";

/* ------------------------------------------------------------------------

   This is a variation of ex2.c to solve the PDE

                u * u_xx
          u_t = ---------
                2*(t+1)^2

   with box constraints on the interior grid points
          ul <= u(t,x) <= uh with x != 0,1
   on the domain 0 <= x <= 1, with boundary conditions
          u(t,0) = t + 1, u(t,1) = 2*t + 2,
   and initial condition
          u(0,x) = 1 + x*x.

   The exact solution is:
          u(t,x) = (1 + x*x) * (1 + t)

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
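/*
   Discretization note: replacing u_xx by the standard centered difference
   on the uniform grid x_i = i*h, h = 1/(m-1), turns the PDE into the
   method-of-lines system

        du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(t+1)^2),   i = 1,...,m-2,

   which is what RHSFunction() evaluates; the first and last rows are
   handled separately using the boundary conditions.  RHSJacobian()
   assembles the tridiagonal Jacobian of this right-hand side.
*/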
/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx      appctx;                 /* user-defined application context */
  TS          ts;                     /* timestepping context */
  Mat         A;                      /* Jacobian matrix data structure */
  Vec         u;                      /* approximate solution vector */
  Vec         r;                      /* residual vector */
  PetscInt    time_steps_max = 1000;  /* default max timesteps */
  PetscReal   dt;
  PetscReal   time_total_max = 100.0; /* default max total time */
  Vec         xl,xu;                  /* lower and upper bounds on variables */
  PetscScalar ul = 0.0,uh = 3.0;
  PetscBool   mymonitor;
  PetscReal   bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));
  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL));
  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));
  appctx.h = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* Create residual vector */
  PetscCall(VecDuplicate(u,&r));
  /* Create lower and upper bound vectors */
  PetscCall(VecDuplicate(u,&xl));
  PetscCall(VecDuplicate(u,&xu));
  PetscCall(SetBounds(xl,xu,ul,uh,&appctx));
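  /*
     Note: SetBounds() (defined below) opens up the bounds to -infinity/+infinity
     at the two boundary points, so the box constraints ul <= u <= uh are imposed
     only on the interior grid points, as stated in the header comment.
  */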
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,r,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step */
  PetscCall(TSVISetVariableBounds(ts,xl,xu));
  PetscCall(TSSetFromOptions(ts));
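  /*
     The bounds set with TSVISetVariableBounds() are meant to be enforced by a
     variational-inequality-capable nonlinear solver; for example, the
     regression test at the bottom of this file runs with
     -snes_type vinewtonrsls.
  */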
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&xl));
  PetscCall(VecDestroy(&xu));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u,&mybase,&myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array. Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version. See the users manual for details.
  */
  PetscCall(VecGetArray(u,&u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u,&u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   SetBounds - Sets the lower and upper bounds on the interior points

   Input parameters:
   xl - vector of lower bounds
   xu - vector of upper bounds
   ul - constant lower bound for all variables
   uh - constant upper bound for all variables
   appctx - application context
*/
PetscErrorCode SetBounds(Vec xl,Vec xu,PetscScalar ul,PetscScalar uh,AppCtx *appctx)
{
  PetscScalar *l,*u;
  PetscMPIInt rank,size;
  PetscInt    localsize;

  PetscFunctionBeginUser;
  PetscCall(VecSet(xl,ul));
  PetscCall(VecSet(xu,uh));
  PetscCall(VecGetLocalSize(xl,&localsize));
  PetscCall(VecGetArray(xl,&l));
  PetscCall(VecGetArray(xu,&u));

  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0) {
    l[0] = -PETSC_INFINITY;
    u[0] =  PETSC_INFINITY;
  }
  if (rank == size-1) {
    l[localsize-1] = -PETSC_INFINITY;
    u[localsize-1] =  PETSC_INFINITY;
  }
  PetscCall(VecRestoreArray(xl,&l));
  PetscCall(VecRestoreArray(xu,&u));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution,&mybase,&myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution,&s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution,&s_localptr));
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep. This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution,-1.0,u));
  PetscCall(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by sqrt(h) so it approximates the continuous L2 norm of the error */
  PetscCall(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm,"Timestep %" PetscInt_FMT ": time = %g, 2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  /* if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
   } */
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE. This routine is set in the main program by
   calling TSSetRHSFunction(). We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*)ctx;      /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;
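  /*
     The update of each owned interior node uses its left and right neighbors;
     those neighbor values may be owned by another process, which is why the
     stencil below reads from the ghosted local array filled by the
     global-to-local scatter above.
  */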
  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i]*sc*(localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  /* if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
     PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
   } */

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts        - the TS context
   t         - current time
   global_in - global input vector
   ctx       - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B  - optionally different matrix from which the preconditioner is constructed

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and column numbers of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;     /* user-defined application context */
  Vec               local_in = appctx->u_local;  /* local ghosted input vector */
  DM                da       = appctx->da;       /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(B,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES));
  }
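  /*
     For an interior row i the right-hand side evaluated in RHSFunction() is
          F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),   sc = 1/(2*h^2*(1+t)^2),
     so the nonzero Jacobian entries in that row are
          dF_i/du_{i-1} = sc * u_i
          dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
          dF_i/du_{i+1} = sc * u_i
     These are the three values v[0],v[1],v[2] inserted per row below; 'is'
     converts the global row index i into the corresponding index of the
     ghosted local array localptr.
  */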
  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
        MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/