xref: /petsc/src/ts/tutorials/ex50.c (revision 46233b442a14b6faff55b7d8bbd3923d44c577ac)
1 static char help[] = "Solves one dimensional Burger's equation compares with exact solution\n\n";
2 
3 /*
4     Not yet tested in parallel
5 */
6 
7 /* ------------------------------------------------------------------------
8 
9    This program uses the one-dimensional Burger's equation
10        u_t = mu*u_xx - u u_x,
11    on the domain 0 <= x <= 1, with periodic boundary conditions
12 
13    The operators are discretized with the spectral element method
14 
15    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
16    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
17    used
18 
19    See src/tao/unconstrained/tutorials/burgers_spectral.c
20 
21   ------------------------------------------------------------------------- */
22 
23 #include <petscts.h>
24 #include <petscdt.h>
25 #include <petscdraw.h>
26 #include <petscdmda.h>
27 
28 /*
29    User-defined application context - contains data needed by the
30    application-provided call-back routines.
31 */
32 
/* Gauss-Lobatto-Legendre (GLL) quadrature data for a single spectral element */
typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;
38 
/* Problem parameters: discretization sizes, physical constants, time-stepping limits */
typedef struct {
  PetscInt  N;               /* grid points per elements*/
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* endtime */
  PetscReal mu;              /* viscosity */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of one element (L/E) */
  PetscReal Tadj;            /* NOTE(review): never referenced in this file; presumably the adjoint end time used in burgers_spectral.c -- confirm before removing */
} PetscParam;
50 
/* Solution data shared between main() and the callbacks */
typedef struct {
  Vec grid;     /* total grid */
  Vec curr_sol; /* current solution vector (global) */
} PetscData;
55 
/* Spectral element method operators and quadrature data */
typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix for total integration */
  Mat      stiff;     /* stiffness matrix */
  Mat      keptstiff; /* unmodified copy of stiff, used by RHSFunction/RHSJacobian */
  Mat      grad;      /* discrete gradient (advection) operator */
  PetscGLL gll;       /* GLL nodes and weights for one element */
} PetscSEMOperators;
64 
/* Top-level application context handed to every callback via the ctx pointer */
typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;         /* the time stepper; created in main() after the operators are assembled */
  PetscReal         initial_dt; /* initial time-step size */
} AppCtx;
73 
74 /*
75    User-defined routines
76 */
77 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
78 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
79 extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
80 extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
81 extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
82 
83 int main(int argc, char **argv)
84 {
85   AppCtx       appctx; /* user-defined application context */
86   PetscInt     i, xs, xm, ind, j, lenglob;
87   PetscReal    x, *wrk_ptr1, *wrk_ptr2;
88   MatNullSpace nsp;
89   PetscMPIInt  size;
90 
91   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
92      Initialize program and set problem parameters
93      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
94   PetscFunctionBeginUser;
95 
96   PetscFunctionBeginUser;
97   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
98 
99   /*initialize parameters */
100   appctx.param.N     = 10;   /* order of the spectral element */
101   appctx.param.E     = 10;   /* number of elements */
102   appctx.param.L     = 4.0;  /* length of the domain */
103   appctx.param.mu    = 0.01; /* diffusion coefficient */
104   appctx.initial_dt  = 5e-3;
105   appctx.param.steps = PETSC_MAX_INT;
106   appctx.param.Tend  = 4;
107 
108   PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
109   PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
110   PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
111   PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
112   appctx.param.Le = appctx.param.L / appctx.param.E;
113 
114   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
115   PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");
116 
117   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
118      Create GLL data structures
119      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
120   PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
121   PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
122   appctx.SEMop.gll.n = appctx.param.N;
123   lenglob            = appctx.param.E * (appctx.param.N - 1);
124 
125   /*
126      Create distributed array (DMDA) to manage parallel grid and vectors
127      and to set up the ghost point communication pattern.  There are E*(Nl-1)+1
128      total grid values spread equally among all the processors, except first and last
129   */
130 
131   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
132   PetscCall(DMSetFromOptions(appctx.da));
133   PetscCall(DMSetUp(appctx.da));
134 
135   /*
136      Extract global and local vectors from DMDA; we use these to store the
137      approximate solution.  Then duplicate these for remaining vectors that
138      have the same types.
139   */
140 
141   PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
142   PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
143   PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));
144 
145   PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
146   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
147   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
148 
149   /* Compute function over the locally owned part of the grid */
150 
151   xs = xs / (appctx.param.N - 1);
152   xm = xm / (appctx.param.N - 1);
153 
154   /*
155      Build total grid and mass over entire mesh (multi-elemental)
156   */
157 
158   for (i = xs; i < xs + xm; i++) {
159     for (j = 0; j < appctx.param.N - 1; j++) {
160       x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
161       ind           = i * (appctx.param.N - 1) + j;
162       wrk_ptr1[ind] = x;
163       wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
164       if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
165     }
166   }
167   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
168   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
169 
170   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
171    Create matrix data structure; set matrix evaluation routine.
172    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
173   PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
174   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
175   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
176   /*
177    For linear problems with a time-dependent f(u,t) in the equation
178    u_t = f(u,t), the user provides the discretized right-hand-side
179    as a time-dependent matrix.
180    */
181   PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
182   PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));
183   /*
184        For linear problems with a time-dependent f(u,t) in the equation
185        u_t = f(u,t), the user provides the discretized right-hand-side
186        as a time-dependent matrix.
187     */
188 
189   PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));
190 
191   /* attach the null space to the matrix, this probably is not needed but does no harm */
192   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
193   PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
194   PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
195   PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
196   PetscCall(MatNullSpaceDestroy(&nsp));
197   /* attach the null space to the matrix, this probably is not needed but does no harm */
198   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
199   PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
200   PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
201   PetscCall(MatNullSpaceDestroy(&nsp));
202 
203   /* Create the TS solver that solves the ODE and its adjoint; set its options */
204   PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
205   PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
206   PetscCall(TSSetType(appctx.ts, TSRK));
207   PetscCall(TSSetDM(appctx.ts, appctx.da));
208   PetscCall(TSSetTime(appctx.ts, 0.0));
209   PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
210   PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
211   PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
212   PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
213   PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
214   PetscCall(TSSetSaveTrajectory(appctx.ts));
215   PetscCall(TSSetFromOptions(appctx.ts));
216   PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
217   PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));
218 
219   /* Set Initial conditions for the problem  */
220   PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));
221 
222   PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
223   PetscCall(TSSetTime(appctx.ts, 0.0));
224   PetscCall(TSSetStepNumber(appctx.ts, 0));
225 
226   PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));
227 
228   PetscCall(MatDestroy(&appctx.SEMop.stiff));
229   PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
230   PetscCall(MatDestroy(&appctx.SEMop.grad));
231   PetscCall(VecDestroy(&appctx.SEMop.grid));
232   PetscCall(VecDestroy(&appctx.SEMop.mass));
233   PetscCall(VecDestroy(&appctx.dat.curr_sol));
234   PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
235   PetscCall(DMDestroy(&appctx.da));
236   PetscCall(TSDestroy(&appctx.ts));
237 
238   /*
239      Always call PetscFinalize() before exiting a program.  This routine
240        - finalizes the PETSc libraries as well as MPI
241        - provides summary and diagnostic information if certain runtime
242          options are chosen (e.g., -log_view).
243   */
244   PetscCall(PetscFinalize());
245   return 0;
246 }
247 
248 /*
249    TrueSolution() computes the true solution for the PDE
250 
251    Input Parameter:
252    u - uninitialized solution vector (global)
253    appctx - user-defined application context
254 
255    Output Parameter:
256    u - vector with solution at initial time (global)
257 */
258 PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
259 {
260   PetscScalar       *s;
261   const PetscScalar *xg;
262   PetscInt           i, xs, xn;
263 
264   PetscFunctionBeginUser;
265   PetscCall(DMDAVecGetArray(appctx->da, u, &s));
266   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
267   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
268   for (i = xs; i < xs + xn; i++) {
269     s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
270   }
271   PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
272   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
273   PetscFunctionReturn(PETSC_SUCCESS);
274 }
275 
276 PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
277 {
278   AppCtx *appctx = (AppCtx *)ctx;
279 
280   PetscFunctionBeginUser;
281   PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout)); /* grad u */
282   PetscCall(VecPointwiseMult(globalout, globalin, globalout)); /* u grad u */
283   PetscCall(VecScale(globalout, -1.0));
284   PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout));
285   PetscFunctionReturn(PETSC_SUCCESS);
286 }
287 
288 /*
289 
290       K is the discretiziation of the Laplacian
291       G is the discretization of the gradient
292 
293       Computes Jacobian of      K u + diag(u) G u   which is given by
294               K   + diag(u)G + diag(Gu)
295 */
296 PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
297 {
298   AppCtx *appctx = (AppCtx *)ctx;
299   Vec     Gglobalin;
300 
301   PetscFunctionBeginUser;
302   /*    A = diag(u) G */
303 
304   PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
305   PetscCall(MatDiagonalScale(A, globalin, NULL));
306 
307   /*    A  = A + diag(Gu) */
308   PetscCall(VecDuplicate(globalin, &Gglobalin));
309   PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
310   PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
311   PetscCall(VecDestroy(&Gglobalin));
312 
313   /*   A  = K - A    */
314   PetscCall(MatScale(A, -1.0));
315   PetscCall(MatAXPY(A, 0.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
316   PetscFunctionReturn(PETSC_SUCCESS);
317 }
318 
319 #include <petscblaslapack.h>
320 /*
321      Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
322 */
PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp, vv;
  PetscInt           i, j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One  = 1, n;
  PetscScalar        _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  /* scatter x into a ghosted local vector so each element's nodes are contiguous */
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  /* dense N x N element Laplacian on the reference element */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  /* scale every entry by -mu * 2/Le (viscosity and element-size factor) */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  /* one dense gemv per element; consecutive elements overlap by the shared endpoint node (stride N-1) */
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  /* ADD_VALUES sums the per-element contributions at shared boundary nodes into the global vector */
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  /* divide by the diagonal mass vector, i.e. apply the inverse lumped mass matrix */
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}
362 
/*
   MatMult_Advection - matrix-free (shell) application of the SEM gradient
   operator: y = -(1/mass) * G x, applied element-by-element with dense GLL
   advection blocks.  Used when -gll_mf is given.
*/
PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp;
  PetscInt           j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One  = 1, n;
  PetscScalar        _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  /* scatter x into a ghosted local vector so each element's nodes are contiguous */
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  /* dense N x N element advection (gradient) matrix on the reference element */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  /* one dense gemv per element; consecutive elements overlap by the shared endpoint node (stride N-1) */
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  /* ADD_VALUES sums the per-element contributions at shared boundary nodes into the global vector */
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  /* divide by the diagonal mass vector, then negate to match the assembled operator */
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscCall(VecScale(y, -1.0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
399 
400 /*
401    RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
402    matrix for the Laplacian operator
403 
404    Input Parameters:
405    ts - the TS context
406    t - current time  (ignored)
407    X - current solution (ignored)
408    dummy - optional user-defined context, as set by TSetRHSJacobian()
409 
410    Output Parameters:
411    AA - Jacobian matrix
412    BB - optionally different matrix from which the preconditioner is built
413    str - flag indicating matrix structure
414 
415 */
416 PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
417 {
418   PetscReal **temp;
419   PetscReal   vv;
420   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
421   PetscInt    i, xs, xn, l, j;
422   PetscInt   *rowsDM;
423   PetscBool   flg = PETSC_FALSE;
424 
425   PetscFunctionBeginUser;
426   PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));
427 
428   if (!flg) {
429     /*
430      Creates the element stiffness matrix for the given gll
431      */
432     PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
433     /* workaround for clang analyzer warning: Division by zero */
434     PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");
435 
436     /* scale by the size of the element */
437     for (i = 0; i < appctx->param.N; i++) {
438       vv = -appctx->param.mu * 2.0 / appctx->param.Le;
439       for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
440     }
441 
442     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
443     PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
444 
445     xs = xs / (appctx->param.N - 1);
446     xn = xn / (appctx->param.N - 1);
447 
448     PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
449     /*
450      loop over local elements
451      */
452     for (j = xs; j < xs + xn; j++) {
453       for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
454       PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
455     }
456     PetscCall(PetscFree(rowsDM));
457     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
458     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
459     PetscCall(VecReciprocal(appctx->SEMop.mass));
460     PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
461     PetscCall(VecReciprocal(appctx->SEMop.mass));
462 
463     PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
464   } else {
465     PetscCall(MatSetType(A, MATSHELL));
466     PetscCall(MatSetUp(A));
467     PetscCall(MatShellSetContext(A, appctx));
468     PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian));
469   }
470   PetscFunctionReturn(PETSC_SUCCESS);
471 }
472 
473 /*
474    RHSMatrixAdvection - User-provided routine to compute the right-hand-side
475    matrix for the Advection (gradient) operator.
476 
477    Input Parameters:
478    ts - the TS context
479    t - current time
480    global_in - global input vector
481    dummy - optional user-defined context, as set by TSetRHSJacobian()
482 
483    Output Parameters:
484    AA - Jacobian matrix
485    BB - optionally different preconditioning matrix
486    str - flag indicating matrix structure
487 
488 */
489 PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
490 {
491   PetscReal **temp;
492   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
493   PetscInt    xs, xn, l, j;
494   PetscInt   *rowsDM;
495   PetscBool   flg = PETSC_FALSE;
496 
497   PetscFunctionBeginUser;
498   PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));
499 
500   if (!flg) {
501     /*
502      Creates the advection matrix for the given gll
503      */
504     PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
505     PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
506     PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
507     xs = xs / (appctx->param.N - 1);
508     xn = xn / (appctx->param.N - 1);
509 
510     PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
511     for (j = xs; j < xs + xn; j++) {
512       for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
513       PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
514     }
515     PetscCall(PetscFree(rowsDM));
516     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
517     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
518 
519     PetscCall(VecReciprocal(appctx->SEMop.mass));
520     PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
521     PetscCall(VecReciprocal(appctx->SEMop.mass));
522 
523     PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
524   } else {
525     PetscCall(MatSetType(A, MATSHELL));
526     PetscCall(MatSetUp(A));
527     PetscCall(MatShellSetContext(A, appctx));
528     PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection));
529   }
530   PetscFunctionReturn(PETSC_SUCCESS);
531 }
532 
533 /*TEST
534 
535     build:
536       requires: !complex
537 
538     test:
539       suffix: 1
540       requires: !single
541 
542     test:
543       suffix: 2
544       nsize: 5
545       requires: !single
546 
547     test:
548       suffix: 3
549       requires: !single
550       args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error
551 
552     test:
553       suffix: 4
554       requires: !single
555       args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error
556 
557 TEST*/
558