static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping. Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\
  -ul    : lower bound\n\
  -uh    : upper bound\n\n";

/* ------------------------------------------------------------------------

   This is a variation of ex2.c to solve the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   with box constraints on the interior grid points
         ul <= u(t,x) <= uh with x != 0,1
   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1, u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
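
/*
   Example invocation (illustrative only; the executable name and the option
   values below are assumptions, adjust them for your build):

       mpiexec -n 2 ./ex21 -M 80 -ul 0.0 -uh 3.0 -ts_max_steps 10 -nox

   The -ul/-uh values set the box constraints enforced on the interior grid
   points at each time step.
*/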

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx      appctx;                 /* user-defined application context */
  TS          ts;                     /* timestepping context */
  Mat         A;                      /* Jacobian matrix data structure */
  Vec         u;                      /* approximate solution vector */
  Vec         r;                      /* residual vector */
  PetscInt    time_steps_max = 1000;  /* default max timesteps */
  PetscReal   dt;
  PetscReal   time_total_max = 100.0; /* default max total time */
  Vec         xl, xu;                 /* Lower and upper bounds on variables */
  PetscScalar ul = 0.0, uh = 3.0;
  PetscBool   mymonitor;
  PetscReal   bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
  appctx.h = 1.0 / (appctx.m - 1.0);
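  /* the m grid points include both endpoints x = 0 and x = 1, hence the spacing 1/(m-1) */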

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* Create residual vector */
  PetscCall(VecDuplicate(u, &r));
  /* Create lower and upper bound vectors */
  PetscCall(VecDuplicate(u, &xl));
  PetscCall(VecDuplicate(u, &xu));
  PetscCall(SetBounds(xl, xu, ul, uh, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  /* Set lower and upper bound on the solution vector for each time step */
  PetscCall(TSVISetVariableBounds(ts, xl, xu));
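  /*
     Note: enforcing these bounds requires a nonlinear solver that handles
     variational inequalities; the test at the end of this file, for example,
     selects -snes_type vinewtonrsls at runtime.
  */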
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&xl));
  PetscCall(VecDestroy(&xu));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }
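
  /*
     A roughly equivalent alternative (a sketch, not used here) that sets the
     same entries with VecSetValues() using global indices, followed by the
     required vector assembly, would be:

        PetscScalar val;
        for (i = mybase; i < myend; i++) {
          x   = h * (PetscReal)i;
          val = 1.0 + x * x;
          PetscCall(VecSetValues(u, 1, &i, &val, INSERT_VALUES));
        }
        PetscCall(VecAssemblyBegin(u));
        PetscCall(VecAssemblyEnd(u));
  */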

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   SetBounds - Sets the lower and upper bounds on the interior points

   Input parameters:
   xl - vector of lower bounds
   xu - vector of upper bounds
   ul - constant lower bound for all variables
   uh - constant upper bound for all variables
   appctx - Application context
*/
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx)
{
  PetscScalar *l, *u;
  PetscMPIInt  rank, size;
  PetscInt     localsize;

  PetscFunctionBeginUser;
  PetscCall(VecSet(xl, ul));
  PetscCall(VecSet(xu, uh));
  PetscCall(VecGetLocalSize(xl, &localsize));
  PetscCall(VecGetArray(xl, &l));
  PetscCall(VecGetArray(xu, &u));

  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
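  /*
     The two boundary points carry the Dirichlet data u(t,0) and u(t,1), so they
     are left unconstrained; the box constraints apply only to the interior points.
  */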
  if (rank == 0) {
    l[0] = -PETSC_INFINITY;
    u[0] = PETSC_INFINITY;
  }
  if (rank == size - 1) {
    l[localsize - 1] = -PETSC_INFINITY;
    u[localsize - 1] = PETSC_INFINITY;
  }
  PetscCall(VecRestoreArray(xl, &l));
  PetscCall(VecRestoreArray(xu, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;
  PetscDraw draw;

  PetscFunctionBeginUser;
  /*
     We use the default X windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator.  This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
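  /* scaling by sqrt(h) makes the discrete 2-norm approximate the continuous L2 norm of the error */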
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g, 2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /*
     Print debugging information if desired
  */
  /* if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
     PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
   } */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
      Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
      Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0;
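  /*
     Note that for t >= 0.5 the time derivative at the right boundary is set to 0
     instead of the exact value 2; presumably this is done so the computed solution
     presses against the upper bound and the variational-inequality solver is exercised.
  */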

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
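  /*
     For an interior node i this computes
        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     i.e. the standard second-order centered difference applied to u*u_xx/(2*(t+1)^2).
  */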
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  /* if (appctx->debug) {
     PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
     PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
   } */
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts        - the TS context
   t         - current time
   global_in - global input vector
   ctx       - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B  - optionally different matrix used to construct the preconditioner

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(B, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
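  /*
     Each interior row corresponds to F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) * sc, so
        dF_i/du_{i-1} = sc*u_i,
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
        dF_i/du_{i+1} = sc*u_i,
     which is what the loop below inserts (localptr[is] is u_i in the ghosted local vector).
  */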
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/