xref: /petsc/src/ts/tutorials/ex50.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
1 
2 static char help[] = "Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";
3 
4 /*
5 
6     Not yet tested in parallel
7 
8 */
9 
10 /* ------------------------------------------------------------------------
11 
12    This program solves the one-dimensional Burgers equation
13        u_t = mu*u_xx - u u_x,
14    on the domain 0 <= x <= 1, with periodic boundary conditions
15 
16    The operators are discretized with the spectral element method
17 
18    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
19    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
20    used
21 
22    See src/tao/unconstrained/tutorials/burgers_spectral.c
23 
24   ------------------------------------------------------------------------- */
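/*
   Example usage (a sketch: the executable name follows the usual PETSc tutorial
   convention and is an assumption; the -N, -E, -Tend, and -mu options are the ones
   registered in main() below):

       ./ex50 -E 20 -N 8 -Tend 2.0 -mu 0.01 -ts_monitor

   All standard TS options (e.g. -ts_type, -ts_dt) can be combined with these.
*/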
25 
26 #include <petscts.h>
27 #include <petscdt.h>
28 #include <petscdraw.h>
29 #include <petscdmda.h>
30 
31 /*
32    User-defined application context - contains data needed by the
33    application-provided call-back routines.
34 */
35 
36 typedef struct {
37   PetscInt   n;       /* number of nodes */
38   PetscReal *nodes;   /* GLL nodes */
39   PetscReal *weights; /* GLL weights */
40 } PetscGLL;
41 
42 typedef struct {
43   PetscInt  N;               /* grid points per element */
44   PetscInt  E;               /* number of elements */
45   PetscReal tol_L2, tol_max; /* error norms */
46   PetscInt  steps;           /* number of timesteps */
47   PetscReal Tend;            /* end time */
48   PetscReal mu;              /* viscosity */
49   PetscReal L;               /* total length of domain */
50   PetscReal Le;
51   PetscReal Tadj;
52 } PetscParam;
53 
54 typedef struct {
55   Vec grid; /* total grid */
56   Vec curr_sol;
57 } PetscData;
58 
59 typedef struct {
60   Vec      grid;  /* total grid */
61   Vec      mass;  /* mass matrix for total integration */
62   Mat      stiff; /* stiffness matrix */
63   Mat      keptstiff;
64   Mat      grad;
65   PetscGLL gll;
66 } PetscSEMOperators;
67 
68 typedef struct {
69   DM                da; /* distributed array data structure */
70   PetscSEMOperators SEMop;
71   PetscParam        param;
72   PetscData         dat;
73   TS                ts;
74   PetscReal         initial_dt;
75 } AppCtx;
76 
77 /*
78    User-defined routines
79 */
80 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
81 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
82 extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
83 extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
84 extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
85 
86 int main(int argc, char **argv) {
87   AppCtx       appctx; /* user-defined application context */
88   PetscInt     i, xs, xm, ind, j, lenglob;
89   PetscReal    x, *wrk_ptr1, *wrk_ptr2;
90   MatNullSpace nsp;
91   PetscMPIInt  size;
92 
93   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
94      Initialize program and set problem parameters
95      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
96   PetscFunctionBeginUser;
97 
99   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
100 
101   /*initialize parameters */
102   appctx.param.N     = 10;   /* number of GLL points per spectral element */
103   appctx.param.E     = 10;   /* number of elements */
104   appctx.param.L     = 4.0;  /* length of the domain */
105   appctx.param.mu    = 0.01; /* diffusion coefficient */
106   appctx.initial_dt  = 5e-3;
107   appctx.param.steps = PETSC_MAX_INT;
108   appctx.param.Tend  = 4;
109 
110   PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
111   PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
112   PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
113   PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
114   appctx.param.Le = appctx.param.L / appctx.param.E;
115 
116   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
117   PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");
118 
119   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120      Create GLL data structures
121      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
122   PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
123   PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
124   appctx.SEMop.gll.n = appctx.param.N;
125   lenglob            = appctx.param.E * (appctx.param.N - 1);
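  /* with the defaults E = 10 and N = 10 this gives 10*(10-1) = 90 global GLL points;
     the grid is periodic, so shared element endpoints are counted only once */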
126 
127   /*
128      Create distributed array (DMDA) to manage parallel grid and vectors
129      and to set up the ghost point communication pattern. With periodic boundary
130      conditions there are E*(N-1) total grid values spread equally among all the processors.
131   */
132 
133   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
134   PetscCall(DMSetFromOptions(appctx.da));
135   PetscCall(DMSetUp(appctx.da));
136 
137   /*
138      Extract global and local vectors from DMDA; we use these to store the
139      approximate solution.  Then duplicate these for remaining vectors that
140      have the same types.
141   */
142 
143   PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
144   PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
145   PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));
146 
147   PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
148   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
149   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
150 
151   /* Compute function over the locally owned part of the grid */
152 
153   xs = xs / (appctx.param.N - 1);
154   xm = xm / (appctx.param.N - 1);
155 
156   /*
157      Build total grid and mass over entire mesh (multi-elemental)
158   */
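  /*
     Each element is an affine image of the reference interval [-1,1]: GLL node j of
     element i is mapped to x = (Le/2)*(node_j + 1) + Le*i and its quadrature weight is
     scaled by Le/2.  The first node of an element coincides with the last node of the
     previous element, so that shared point receives the weight contributions of both
     elements (w[0] = w[N-1] by the symmetry of the GLL weights).
  */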
159 
160   for (i = xs; i < xs + xm; i++) {
161     for (j = 0; j < appctx.param.N - 1; j++) {
162       x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
163       ind           = i * (appctx.param.N - 1) + j;
164       wrk_ptr1[ind] = x;
165       wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
166       if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
167     }
168   }
169   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
170   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
171 
172   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173    Create matrix data structure; set matrix evaluation routine.
174    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
175   PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
176   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
177   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
178   /*
179    Assemble the constant Laplacian (stiff) and gradient (grad) operators once;
180    they are reused below to build the nonlinear right-hand side
181    and its Jacobian.
182    */
183   PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
184   PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));
190 
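  /* keep an unmodified copy of the Laplacian: SEMop.stiff is handed to TSSetRHSJacobian()
     below and is overwritten at every Jacobian evaluation, while keptstiff is what
     RHSFunction() and RHSJacobian() use for the viscous term */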
191   PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));
192 
193   /* attach the null space to the matrix; this probably is not needed but does no harm */
194   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
195   PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
196   PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
197   PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
198   PetscCall(MatNullSpaceDestroy(&nsp));
199   /* attach the null space to the matrix; this probably is not needed but does no harm */
200   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
201   PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
202   PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
203   PetscCall(MatNullSpaceDestroy(&nsp));
204 
205   /* Create the TS solver that solves the ODE and its adjoint; set its options */
206   PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
207   PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
208   PetscCall(TSSetType(appctx.ts, TSRK));
209   PetscCall(TSSetDM(appctx.ts, appctx.da));
210   PetscCall(TSSetTime(appctx.ts, 0.0));
211   PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
212   PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
213   PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
214   PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
215   PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
216   PetscCall(TSSetSaveTrajectory(appctx.ts));
217   PetscCall(TSSetFromOptions(appctx.ts));
218   PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
219   PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));
220 
221   /* Set Initial conditions for the problem  */
222   PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));
223 
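  /* registering the exact solution allows TS to compute and report the discretization
     error, e.g. via the -ts_monitor_error option used in the tests below */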
224   PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
225   PetscCall(TSSetTime(appctx.ts, 0.0));
226   PetscCall(TSSetStepNumber(appctx.ts, 0));
227 
228   PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));
229 
230   PetscCall(MatDestroy(&appctx.SEMop.stiff));
231   PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
232   PetscCall(MatDestroy(&appctx.SEMop.grad));
233   PetscCall(VecDestroy(&appctx.SEMop.grid));
234   PetscCall(VecDestroy(&appctx.SEMop.mass));
235   PetscCall(VecDestroy(&appctx.dat.curr_sol));
236   PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
237   PetscCall(DMDestroy(&appctx.da));
238   PetscCall(TSDestroy(&appctx.ts));
239 
240   /*
241      Always call PetscFinalize() before exiting a program.  This routine
242        - finalizes the PETSc libraries as well as MPI
243        - provides summary and diagnostic information if certain runtime
244          options are chosen (e.g., -log_summary).
245   */
246   PetscCall(PetscFinalize());
247   return 0;
248 }
249 
250 /*
251    TrueSolution() computes the true solution for the PDE
252 
253    Input Parameter:
254    u - uninitialized solution vector (global)
255    appctx - user-defined application context
256 
257    Output Parameter:
258    u - vector with the exact solution at time t (global)
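   Notes:
   The exact solution evaluated here is
       u(x,t) = 2*mu*pi*sin(pi*x)*exp(-mu*pi^2*t) / (2 + cos(pi*x)*exp(-mu*pi^2*t)),
   see the paper cited at the top of this file.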
259 */
260 PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx) {
261   PetscScalar       *s;
262   const PetscScalar *xg;
263   PetscInt           i, xs, xn;
264 
265   PetscCall(DMDAVecGetArray(appctx->da, u, &s));
266   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
267   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
268   for (i = xs; i < xs + xn; i++) {
269     s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
270   }
271   PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
272   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
273   return 0;
274 }
275 
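/*
   RHSFunction - evaluates the semi-discrete right-hand side

       u_t = f(u) = K u - u .* (G u)

   where K = keptstiff is the (mass-scaled) viscous Laplacian and G = grad is the
   (mass-scaled) gradient operator assembled in main().
*/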
276 PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx) {
277   AppCtx *appctx = (AppCtx *)ctx;
278 
279   PetscFunctionBeginUser;
280   PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout)); /* grad u */
281   PetscCall(VecPointwiseMult(globalout, globalin, globalout)); /* u grad u */
282   PetscCall(VecScale(globalout, -1.0));
283   PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout));
284   PetscFunctionReturn(0);
285 }
286 
287 /*
288 
289       K is the discretization of the Laplacian
290       G is the discretization of the gradient
291 
292       The right-hand side is   K u - diag(u) G u,   so its Jacobian is
293               K - diag(u) G - diag(Gu)
294 */
295 PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx) {
296   AppCtx *appctx = (AppCtx *)ctx;
297   Vec     Gglobalin;
298 
299   PetscFunctionBeginUser;
300   /*    A = diag(u) G */
301 
302   PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
303   PetscCall(MatDiagonalScale(A, globalin, NULL));
304 
305   /*    A  = A + diag(Gu) */
306   PetscCall(VecDuplicate(globalin, &Gglobalin));
307   PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
308   PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
309   PetscCall(VecDestroy(&Gglobalin));
310 
311   /*   A  = K - A    */
312   PetscCall(MatScale(A, -1.0));
313   PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
314   PetscFunctionReturn(0);
315 }
316 
317 /* --------------------------------------------------------------------- */
318 
319 #include "petscblaslapack.h"
320 /*
321      Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
322 */
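/*
   Both routines below apply their operator element by element: the global vector is
   scattered into a ghosted local vector, the dense element matrix is applied to each
   element's nodal values with BLASgemv, the element contributions are summed back into
   the global vector with ADD_VALUES, and the result is divided by the lumped mass vector.
*/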
323 PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y) {
324   AppCtx            *appctx;
325   PetscReal        **temp, vv;
326   PetscInt           i, j, xs, xn;
327   Vec                xlocal, ylocal;
328   const PetscScalar *xl;
329   PetscScalar       *yl;
330   PetscBLASInt       _One  = 1, n;
331   PetscScalar        _DOne = 1;
332 
333   PetscCall(MatShellGetContext(A, &appctx));
334   PetscCall(DMGetLocalVector(appctx->da, &xlocal));
335   PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
336   PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
337   PetscCall(DMGetLocalVector(appctx->da, &ylocal));
338   PetscCall(VecSet(ylocal, 0.0));
339   PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
340   for (i = 0; i < appctx->param.N; i++) {
341     vv = -appctx->param.mu * 2.0 / appctx->param.Le;
342     for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
343   }
344   PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
345   PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
346   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
347   PetscCall(PetscBLASIntCast(appctx->param.N, &n));
348   for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
349   PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
350   PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
351   PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
352   PetscCall(VecSet(y, 0.0));
353   PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
354   PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
355   PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
356   PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
357   PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
358   return 0;
359 }
360 
361 PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y) {
362   AppCtx            *appctx;
363   PetscReal        **temp;
364   PetscInt           j, xs, xn;
365   Vec                xlocal, ylocal;
366   const PetscScalar *xl;
367   PetscScalar       *yl;
368   PetscBLASInt       _One  = 1, n;
369   PetscScalar        _DOne = 1;
370 
371   PetscCall(MatShellGetContext(A, &appctx));
372   PetscCall(DMGetLocalVector(appctx->da, &xlocal));
373   PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
374   PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
375   PetscCall(DMGetLocalVector(appctx->da, &ylocal));
376   PetscCall(VecSet(ylocal, 0.0));
377   PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
378   PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
379   PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
380   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
381   PetscCall(PetscBLASIntCast(appctx->param.N, &n));
382   for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
383   PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
384   PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
385   PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
386   PetscCall(VecSet(y, 0.0));
387   PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
388   PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
389   PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
390   PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
391   PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
392   PetscCall(VecScale(y, -1.0));
393   return 0;
394 }
395 
396 /*
397    RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
398    matrix for the Laplacian operator
399 
400    Input Parameters:
401    ts - the TS context
402    t - current time  (ignored)
403    X - current solution (ignored)
404    ctx - optional user-defined context, as set by TSSetRHSJacobian()
405 
406    Output Parameters:
407    AA - Jacobian matrix
408    BB - optionally different matrix from which the preconditioner is built
409    str - flag indicating matrix structure
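   Notes:
   With the -gll_mf option the matrix is not assembled; A is set up as a MATSHELL whose
   multiply routine is MatMult_Laplacian() (see, e.g., test 3 below, which runs with
   -gll_mf -pc_type none).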
410 
411 */
412 PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx) {
413   PetscReal **temp;
414   PetscReal   vv;
415   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
416   PetscInt    i, xs, xn, l, j;
417   PetscInt   *rowsDM;
418   PetscBool   flg = PETSC_FALSE;
419 
420   PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));
421 
422   if (!flg) {
423     /*
424      Creates the element stiffness matrix for the given gll
425      */
426     PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
427     /* workaround for clang analyzer warning: Division by zero */
428     PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");
429 
430     /* scale by the size of the element */
431     for (i = 0; i < appctx->param.N; i++) {
432       vv = -appctx->param.mu * 2.0 / appctx->param.Le;
433       for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
434     }
435 
436     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
437     PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
438 
439     xs = xs / (appctx->param.N - 1);
440     xn = xn / (appctx->param.N - 1);
441 
442     PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
443     /*
444      loop over local elements
445      */
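    /* rowsDM[] holds indices in the DMDA local (ghosted) numbering used by
       MatSetValuesLocal(); local index 0 is the left ghost point, so the first node of
       element j starts at 1 + (j - xs)*(N - 1) */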
446     for (j = xs; j < xs + xn; j++) {
447       for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
448       PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
449     }
450     PetscCall(PetscFree(rowsDM));
451     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
452     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
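    /* scale the rows by the inverse of the (diagonal) lumped mass so that A applies the
       strong-form operator; the reciprocal is undone immediately afterwards */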
453     PetscCall(VecReciprocal(appctx->SEMop.mass));
454     PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
455     PetscCall(VecReciprocal(appctx->SEMop.mass));
456 
457     PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
458   } else {
459     PetscCall(MatSetType(A, MATSHELL));
460     PetscCall(MatSetUp(A));
461     PetscCall(MatShellSetContext(A, appctx));
462     PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian));
463   }
464   return 0;
465 }
466 
467 /*
468    RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
469    matrix for the Advection (gradient) operator.
470 
471    Input Parameters:
472    ts - the TS context
473    t - current time
474    X - current solution (ignored)
475    ctx - optional user-defined context, as set by TSSetRHSJacobian()
476 
477    Output Parameters:
478    AA - Jacobian matrix
479    BB - optionally different preconditioning matrix
480    str - flag indicating matrix structure
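   Notes:
   As for the Laplacian, the -gll_mf option replaces the assembled matrix with a MATSHELL
   whose multiply routine is MatMult_Advection().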
481 
482 */
483 PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx) {
484   PetscReal **temp;
485   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
486   PetscInt    xs, xn, l, j;
487   PetscInt   *rowsDM;
488   PetscBool   flg = PETSC_FALSE;
489 
490   PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));
491 
492   if (!flg) {
493     /*
494      Creates the advection matrix for the given gll
495      */
496     PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
497     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
498     PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
499     xs = xs / (appctx->param.N - 1);
500     xn = xn / (appctx->param.N - 1);
501 
502     PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
503     for (j = xs; j < xs + xn; j++) {
504       for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
505       PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
506     }
507     PetscCall(PetscFree(rowsDM));
508     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
509     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
510 
511     PetscCall(VecReciprocal(appctx->SEMop.mass));
512     PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
513     PetscCall(VecReciprocal(appctx->SEMop.mass));
514 
515     PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
516   } else {
517     PetscCall(MatSetType(A, MATSHELL));
518     PetscCall(MatSetUp(A));
519     PetscCall(MatShellSetContext(A, appctx));
520     PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection));
521   }
522   return 0;
523 }
524 
525 /*TEST
526 
527     build:
528       requires: !complex
529 
530     test:
531       suffix: 1
532       requires: !single
533 
534     test:
535       suffix: 2
536       nsize: 5
537       requires: !single
538 
539     test:
540       suffix: 3
541       requires: !single
542       args: -ts_view  -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error
543 
544     test:
545       suffix: 4
546       requires: !single
547       args: -ts_view  -ts_type beuler  -pc_type none -ts_max_steps 5 -ts_monitor_error
548 
549 TEST*/
550