static char help[] ="Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping. Runtime options include:\n\
-M <xg>, where <xg> = number of grid points\n\
-debug : Activate debugging printouts\n\
-nox : Deactivate x-window graphics\n\
-ul : lower bound\n\
-uh : upper bound\n\n";

/* ------------------------------------------------------------------------

   This is a variation of ex2.c to solve the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   with box constraints on the interior grid points
         ul <= u(t,x) <= uh   with x != 0,1
   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx      appctx;                 /* user-defined application context */
  TS          ts;                     /* timestepping context */
  Mat         A;                      /* Jacobian matrix data structure */
  Vec         u;                      /* approximate solution vector */
  Vec         r;                      /* residual vector */
  PetscInt    time_steps_max = 1000;  /* default max timesteps */
  PetscReal   dt;
  PetscReal   time_total_max = 100.0; /* default max total time */
  Vec         xl,xu;                  /* Lower and upper bounds on variables */
  PetscScalar ul=0.0,uh = 3.0;        /* default box-constraint values, overridable by -ul/-uh */
  PetscBool   mymonitor;
  PetscReal   bounds[] = {1.0, 3.3}; /* y-axis bounds for the draw viewer, not the VI bounds */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL));

  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL));
  PetscCall(PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug));
  PetscCall(PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor));
  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(DMCreateLocalVector(appctx.da,&appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local,&appctx.localwork));
  PetscCall(VecDuplicate(u,&appctx.solution));

  /* Create residual vector */
  PetscCall(VecDuplicate(u,&r));
  /* Create lower and upper bound vectors */
  PetscCall(VecDuplicate(u,&xl));
  PetscCall(VecDuplicate(u,&xu));
  PetscCall(SetBounds(xl,xu,ul,uh,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts,r,RHSFunction,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    PetscCall(TSMonitorSet(ts,Monitor,&appctx,NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  PetscCall(TSSetTimeStep(ts,dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts,TSBEULER));
  PetscCall(TSSetMaxSteps(ts,time_steps_max));
  PetscCall(TSSetMaxTime(ts,time_total_max));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step */
  PetscCall(TSVISetVariableBounds(ts,xl,xu));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u,&appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts,u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&xl));
  PetscCall(VecDestroy(&xu));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.
234 235 Input Parameters: 236 u - uninitialized solution vector (global) 237 appctx - user-defined application context 238 239 Output Parameter: 240 u - vector with solution at initial time (global) 241 */ 242 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx) 243 { 244 PetscScalar *u_localptr,h = appctx->h,x; 245 PetscInt i,mybase,myend; 246 247 /* 248 Determine starting point of each processor's range of 249 grid values. 250 */ 251 PetscCall(VecGetOwnershipRange(u,&mybase,&myend)); 252 253 /* 254 Get a pointer to vector data. 255 - For default PETSc vectors, VecGetArray() returns a pointer to 256 the data array. Otherwise, the routine is implementation dependent. 257 - You MUST call VecRestoreArray() when you no longer need access to 258 the array. 259 - Note that the Fortran interface to VecGetArray() differs from the 260 C version. See the users manual for details. 261 */ 262 PetscCall(VecGetArray(u,&u_localptr)); 263 264 /* 265 We initialize the solution array by simply writing the solution 266 directly into the array locations. Alternatively, we could use 267 VecSetValues() or VecSetValuesLocal(). 
268 */ 269 for (i=mybase; i<myend; i++) { 270 x = h*(PetscReal)i; /* current location in global grid */ 271 u_localptr[i-mybase] = 1.0 + x*x; 272 } 273 274 /* 275 Restore vector 276 */ 277 PetscCall(VecRestoreArray(u,&u_localptr)); 278 279 /* 280 Print debugging information if desired 281 */ 282 if (appctx->debug) { 283 PetscCall(PetscPrintf(appctx->comm,"initial guess vector\n")); 284 PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD)); 285 } 286 287 return 0; 288 } 289 290 /* --------------------------------------------------------------------- */ 291 /* 292 SetBounds - Sets the lower and uper bounds on the interior points 293 294 Input parameters: 295 xl - vector of lower bounds 296 xu - vector of upper bounds 297 ul - constant lower bound for all variables 298 uh - constant upper bound for all variables 299 appctx - Application context 300 */ 301 PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx) 302 { 303 PetscScalar *l,*u; 304 PetscMPIInt rank,size; 305 PetscInt localsize; 306 307 PetscFunctionBeginUser; 308 PetscCall(VecSet(xl,ul)); 309 PetscCall(VecSet(xu,uh)); 310 PetscCall(VecGetLocalSize(xl,&localsize)); 311 PetscCall(VecGetArray(xl,&l)); 312 PetscCall(VecGetArray(xu,&u)); 313 314 PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank)); 315 PetscCallMPI(MPI_Comm_size(appctx->comm,&size)); 316 if (rank == 0) { 317 l[0] = -PETSC_INFINITY; 318 u[0] = PETSC_INFINITY; 319 } 320 if (rank == size-1) { 321 l[localsize-1] = -PETSC_INFINITY; 322 u[localsize-1] = PETSC_INFINITY; 323 } 324 PetscCall(VecRestoreArray(xl,&l)); 325 PetscCall(VecRestoreArray(xu,&u)); 326 PetscFunctionReturn(0); 327 } 328 329 /* --------------------------------------------------------------------- */ 330 /* 331 ExactSolution - Computes the exact solution at a given time. 
332 333 Input Parameters: 334 t - current time 335 solution - vector in which exact solution will be computed 336 appctx - user-defined application context 337 338 Output Parameter: 339 solution - vector with the newly computed exact solution 340 */ 341 PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx) 342 { 343 PetscScalar *s_localptr,h = appctx->h,x; 344 PetscInt i,mybase,myend; 345 346 /* 347 Determine starting and ending points of each processor's 348 range of grid values 349 */ 350 PetscCall(VecGetOwnershipRange(solution,&mybase,&myend)); 351 352 /* 353 Get a pointer to vector data. 354 */ 355 PetscCall(VecGetArray(solution,&s_localptr)); 356 357 /* 358 Simply write the solution directly into the array locations. 359 Alternatively, we could use VecSetValues() or VecSetValuesLocal(). 360 */ 361 for (i=mybase; i<myend; i++) { 362 x = h*(PetscReal)i; 363 s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x); 364 } 365 366 /* 367 Restore vector 368 */ 369 PetscCall(VecRestoreArray(solution,&s_localptr)); 370 return 0; 371 } 372 /* --------------------------------------------------------------------- */ 373 /* 374 Monitor - User-provided routine to monitor the solution computed at 375 each timestep. This example plots the solution and computes the 376 error in two different norms. 377 378 Input Parameters: 379 ts - the timestep context 380 step - the count of the current step (with 0 meaning the 381 initial condition) 382 time - the current time 383 u - the solution at this timestep 384 ctx - the user-provided context for this monitoring routine. 385 In this case we use the application context which contains 386 information about the problem size, workspace and the exact 387 solution. 
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx; /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X windows viewer
        PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u,PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time,appctx->solution,appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm,"Computed solution vector\n"));
    PetscCall(VecView(u,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm,"Exact solution vector\n"));
    PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error.
     Note: this overwrites appctx->solution with (exact - computed).
  */
  PetscCall(VecAXPY(appctx->solution,-1.0,u));
  PetscCall(VecNorm(appctx->solution,NORM_2,&en2));
  en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
  PetscCall(VecNorm(appctx->solution,NORM_MAX,&enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm,"Timestep %" PetscInt_FMT ": time = %g,2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax));

  /*
     Print debugging information if desired
  */
  /* if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
  } */
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evalues the right-hand-side
   function of the ODE. This routine is set in the main program by
   calling TSSetRHSFunction(). We compute:
          global_out = F(global_in)

   Input Parameters:
   ts - timesteping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork,&copyptr));

  /* sc folds the 1/h^2 of the centered difference together with the
     1/(2*(t+1)^2) factor of the PDE. */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor.
     Note: local_in is the ghosted local vector, so localsize includes
     any ghost points; the loop below runs over indices 1..localsize-2.
  */
  PetscCall(VecGetLocalSize(local_in,&localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2

     NOTE(review): the code below sets u_t(t,1) to 0 once t >= 0.5, which
     disagrees with the comment above for later times — presumably intended
     to push the solution against the bounds; confirm intent upstream.
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm,&rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm,&size));
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;

  /*
     Handle the interior nodes where the PDE is replace by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));
  PetscCall(VecRestoreArray(localwork,&copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out));
  PetscCall(DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out));

  /* Print debugging information if desired */
  /* if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
     PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
  } */

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local; /* local ghosted input vector */
  DM                da       = appctx->da;      /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in));
  PetscCall(DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in,&localptr));

  /*
     Get starting and ending locally owned rows of the matrix.
     All assembly below goes into B (the preconditioning matrix); in this
     example AA and B are the same matrix, as set in main().
  */
  PetscCall(MatGetOwnershipRange(B,&mstarts,&mends));
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data: the Dirichlet rows get
     a zero diagonal (their RHS entries are set directly in RHSFunction).
     mstart/mend are shrunk so the interior loop skips these rows.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time. For F_i = sc*u_i*(u_{i+1}+u_{i-1}-2u_i),
     the row of the Jacobian is
        dF_i/du_{i-1} = sc*u_i
        dF_i/du_i     = sc*(u_{i+1}+u_{i-1}-4u_i)
        dF_i/du_{i+1} = sc*u_i
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    /* is maps global row i into the ghosted local array indexing
       (the local array starts one point before the owned range,
       except on rank 0 where mstart was already advanced past row 0). */
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    PetscCall(MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in,&localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
        MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  return 0;
}

/*TEST

    test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/