xref: /petsc/src/ts/tutorials/ex50.c (revision 2fa40bb9206b96114faa7cb222621ec184d31cd2)
1 
2 static char help[] ="Solves one dimensional Burger's equation compares with exact solution\n\n";
3 
4 /*
5 
6     Not yet tested in parallel
7 
8 */
9 /*
10    Concepts: TS^time-dependent nonlinear problems
11    Concepts: TS^Burger's equation
12    Processors: n
13 */
14 
15 /* ------------------------------------------------------------------------
16 
17    This program uses the one-dimensional Burger's equation
18        u_t = mu*u_xx - u u_x,
19    on the domain 0 <= x <= 1, with periodic boundary conditions
20 
21    The operators are discretized with the spectral element method
22 
23    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
24    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
25    used
26 
27    See src/tao/unconstrained/tutorials/burgers_spectral.c
28 
29   ------------------------------------------------------------------------- */
30 
31 #include <petscts.h>
32 #include <petscdt.h>
33 #include <petscdraw.h>
34 #include <petscdmda.h>
35 
36 /*
37    User-defined application context - contains data needed by the
38    application-provided call-back routines.
39 */
40 
/* Gauss-Lobatto-Legendre (GLL) quadrature data for one spectral element */
typedef struct {
  PetscInt  n;                /* number of GLL nodes per element */
  PetscReal *nodes;           /* GLL nodes */
  PetscReal *weights;         /* GLL weights */
} PetscGLL;
46 
/* problem and discretization parameters */
typedef struct {
  PetscInt    N;             /* grid points per elements*/
  PetscInt    E;              /* number of elements */
  PetscReal   tol_L2,tol_max; /* error norms (not used in the code visible in this file) */
  PetscInt    steps;          /* number of timesteps */
  PetscReal   Tend;           /* endtime */
  PetscReal   mu;             /* viscosity */
  PetscReal   L;              /* total length of domain */
  PetscReal   Le;             /* length of one element, L/E */
  PetscReal   Tadj;           /* presumably an adjoint end time; unused here -- TODO confirm */
} PetscParam;
58 
/* solution data */
typedef struct {
  Vec         grid;              /* total grid (unused in this file; the grid actually used is SEMop.grid) */
  Vec         curr_sol;          /* current solution, advanced in time by TSSolve() */
} PetscData;
63 
/* spectral element method operators */
typedef struct {
  Vec         grid;              /* total grid (node coordinates) */
  Vec         mass;              /* diagonal (lumped) mass matrix for total integration */
  Mat         stiff;             /* stiffness matrix; also reused as Jacobian storage by TS */
  Mat         keptstiff;         /* pristine copy of stiff, needed because stiff is overwritten in RHSJacobian() */
  Mat         grad;              /* gradient (advection) matrix */
  PetscGLL    gll;               /* GLL nodes and weights */
} PetscSEMOperators;
72 
/* user-defined application context passed to all callbacks */
typedef struct {
  DM                da;                /* distributed array data structure */
  PetscSEMOperators SEMop;             /* SEM operators (stiffness, gradient, mass, GLL data) */
  PetscParam        param;             /* problem parameters */
  PetscData         dat;               /* solution vectors */
  TS                ts;                /* timestepping solver */
  PetscReal         initial_dt;        /* initial timestep */
} AppCtx;
81 
82 /*
83    User-defined routines
84 */
85 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
86 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
87 extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
88 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
89 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
90 
91 int main(int argc,char **argv)
92 {
93   AppCtx         appctx;                 /* user-defined application context */
94   PetscErrorCode ierr;
95   PetscInt       i, xs, xm, ind, j, lenglob;
96   PetscReal      x, *wrk_ptr1, *wrk_ptr2;
97   MatNullSpace   nsp;
98   PetscMPIInt    size;
99 
100    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101      Initialize program and set problem parameters
102      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
103   PetscFunctionBegin;
104 
105   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
106 
107   /*initialize parameters */
108   appctx.param.N    = 10;  /* order of the spectral element */
109   appctx.param.E    = 10;  /* number of elements */
110   appctx.param.L    = 4.0;  /* length of the domain */
111   appctx.param.mu   = 0.01; /* diffusion coefficient */
112   appctx.initial_dt = 5e-3;
113   appctx.param.steps = PETSC_MAX_INT;
114   appctx.param.Tend  = 4;
115 
116   ierr = PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);CHKERRQ(ierr);
117   ierr = PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);CHKERRQ(ierr);
118   ierr = PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);CHKERRQ(ierr);
119   ierr = PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);CHKERRQ(ierr);
120   appctx.param.Le = appctx.param.L/appctx.param.E;
121 
122   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
123   if (appctx.param.E % size) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes");
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126      Create GLL data structures
127      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   ierr = PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights);CHKERRQ(ierr);
129   ierr = PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);CHKERRQ(ierr);
130   appctx.SEMop.gll.n = appctx.param.N;
131   lenglob  = appctx.param.E*(appctx.param.N-1);
132 
133   /*
134      Create distributed array (DMDA) to manage parallel grid and vectors
135      and to set up the ghost point communication pattern.  There are E*(Nl-1)+1
136      total grid values spread equally among all the processors, except first and last
137   */
138 
139   ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);CHKERRQ(ierr);
140   ierr = DMSetFromOptions(appctx.da);CHKERRQ(ierr);
141   ierr = DMSetUp(appctx.da);CHKERRQ(ierr);
142 
143   /*
144      Extract global and local vectors from DMDA; we use these to store the
145      approximate solution.  Then duplicate these for remaining vectors that
146      have the same types.
147   */
148 
149   ierr = DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol);CHKERRQ(ierr);
150   ierr = VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid);CHKERRQ(ierr);
151   ierr = VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass);CHKERRQ(ierr);
152 
153   ierr = DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);CHKERRQ(ierr);
154   ierr = DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);CHKERRQ(ierr);
155   ierr = DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);CHKERRQ(ierr);
156 
157   /* Compute function over the locally owned part of the grid */
158 
159     xs=xs/(appctx.param.N-1);
160     xm=xm/(appctx.param.N-1);
161 
162   /*
163      Build total grid and mass over entire mesh (multi-elemental)
164   */
165 
166   for (i=xs; i<xs+xm; i++) {
167     for (j=0; j<appctx.param.N-1; j++) {
168       x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
169       ind=i*(appctx.param.N-1)+j;
170       wrk_ptr1[ind]=x;
171       wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
172       if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
173     }
174   }
175   ierr = DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);CHKERRQ(ierr);
176   ierr = DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);CHKERRQ(ierr);
177 
178   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179    Create matrix data structure; set matrix evaluation routine.
180    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
181   ierr = DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);CHKERRQ(ierr);
182   ierr = DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);CHKERRQ(ierr);
183   ierr = DMCreateMatrix(appctx.da,&appctx.SEMop.grad);CHKERRQ(ierr);
184   /*
185    For linear problems with a time-dependent f(u,t) in the equation
186    u_t = f(u,t), the user provides the discretized right-hand-side
187    as a time-dependent matrix.
188    */
189   ierr = RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);CHKERRQ(ierr);
190   ierr = RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);CHKERRQ(ierr);
191    /*
192        For linear problems with a time-dependent f(u,t) in the equation
193        u_t = f(u,t), the user provides the discretized right-hand-side
194        as a time-dependent matrix.
195     */
196 
197   ierr = MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);CHKERRQ(ierr);
198 
199   /* attach the null space to the matrix, this probably is not needed but does no harm */
200   ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr);
201   ierr = MatSetNullSpace(appctx.SEMop.stiff,nsp);CHKERRQ(ierr);
202   ierr = MatSetNullSpace(appctx.SEMop.keptstiff,nsp);CHKERRQ(ierr);
203   ierr = MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);CHKERRQ(ierr);
204   ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr);
205   /* attach the null space to the matrix, this probably is not needed but does no harm */
206   ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr);
207   ierr = MatSetNullSpace(appctx.SEMop.grad,nsp);CHKERRQ(ierr);
208   ierr = MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);CHKERRQ(ierr);
209   ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr);
210 
211   /* Create the TS solver that solves the ODE and its adjoint; set its options */
212   ierr = TSCreate(PETSC_COMM_WORLD,&appctx.ts);CHKERRQ(ierr);
213   ierr = TSSetProblemType(appctx.ts,TS_NONLINEAR);CHKERRQ(ierr);
214   ierr = TSSetType(appctx.ts,TSRK);CHKERRQ(ierr);
215   ierr = TSSetDM(appctx.ts,appctx.da);CHKERRQ(ierr);
216   ierr = TSSetTime(appctx.ts,0.0);CHKERRQ(ierr);
217   ierr = TSSetTimeStep(appctx.ts,appctx.initial_dt);CHKERRQ(ierr);
218   ierr = TSSetMaxSteps(appctx.ts,appctx.param.steps);CHKERRQ(ierr);
219   ierr = TSSetMaxTime(appctx.ts,appctx.param.Tend);CHKERRQ(ierr);
220   ierr = TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
221   ierr = TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);CHKERRQ(ierr);
222   ierr = TSSetSaveTrajectory(appctx.ts);CHKERRQ(ierr);
223   ierr = TSSetFromOptions(appctx.ts);CHKERRQ(ierr);
224   ierr = TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);CHKERRQ(ierr);
225   ierr = TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);CHKERRQ(ierr);
226 
227   /* Set Initial conditions for the problem  */
228   ierr = TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx);CHKERRQ(ierr);
229 
230   ierr = TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx);CHKERRQ(ierr);
231   ierr = TSSetTime(appctx.ts,0.0);CHKERRQ(ierr);
232   ierr = TSSetStepNumber(appctx.ts,0);CHKERRQ(ierr);
233 
234   ierr = TSSolve(appctx.ts,appctx.dat.curr_sol);CHKERRQ(ierr);
235 
236   ierr = MatDestroy(&appctx.SEMop.stiff);CHKERRQ(ierr);
237   ierr = MatDestroy(&appctx.SEMop.keptstiff);CHKERRQ(ierr);
238   ierr = MatDestroy(&appctx.SEMop.grad);CHKERRQ(ierr);
239   ierr = VecDestroy(&appctx.SEMop.grid);CHKERRQ(ierr);
240   ierr = VecDestroy(&appctx.SEMop.mass);CHKERRQ(ierr);
241   ierr = VecDestroy(&appctx.dat.curr_sol);CHKERRQ(ierr);
242   ierr = PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);CHKERRQ(ierr);
243   ierr = DMDestroy(&appctx.da);CHKERRQ(ierr);
244   ierr = TSDestroy(&appctx.ts);CHKERRQ(ierr);
245 
246   /*
247      Always call PetscFinalize() before exiting a program.  This routine
248        - finalizes the PETSc libraries as well as MPI
249        - provides summary and diagnostic information if certain runtime
250          options are chosen (e.g., -log_summary).
251   */
252     ierr = PetscFinalize();
253     return ierr;
254 }
255 
256 /*
257    TrueSolution() computes the true solution for the PDE
258 
259    Input Parameter:
260    u - uninitialized solution vector (global)
261    appctx - user-defined application context
262 
263    Output Parameter:
264    u - vector with solution at initial time (global)
265 */
266 PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u,AppCtx *appctx)
267 {
268   PetscScalar       *s;
269   const PetscScalar *xg;
270   PetscErrorCode    ierr;
271   PetscInt          i,xs,xn;
272 
273   ierr = DMDAVecGetArray(appctx->da,u,&s);CHKERRQ(ierr);
274   ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr);
275   ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
276   for (i=xs; i<xs+xn; i++) {
277     s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
278   }
279   ierr = DMDAVecRestoreArray(appctx->da,u,&s);CHKERRQ(ierr);
280   ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr);
281   return 0;
282 }
283 
284 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
285 {
286   PetscErrorCode ierr;
287   AppCtx          *appctx = (AppCtx*)ctx;
288 
289   PetscFunctionBegin;
290   ierr = MatMult(appctx->SEMop.grad,globalin,globalout);CHKERRQ(ierr); /* grad u */
291   ierr = VecPointwiseMult(globalout,globalin,globalout);CHKERRQ(ierr); /* u grad u */
292   ierr = VecScale(globalout, -1.0);CHKERRQ(ierr);
293   ierr = MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout);CHKERRQ(ierr);
294   PetscFunctionReturn(0);
295 }
296 
297 /*
298 
299       K is the discretiziation of the Laplacian
300       G is the discretization of the gradient
301 
302       Computes Jacobian of      K u + diag(u) G u   which is given by
303               K   + diag(u)G + diag(Gu)
304 */
305 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
306 {
307   PetscErrorCode ierr;
308   AppCtx         *appctx = (AppCtx*)ctx;
309   Vec            Gglobalin;
310 
311   PetscFunctionBegin;
312   /*    A = diag(u) G */
313 
314   ierr = MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
315   ierr = MatDiagonalScale(A,globalin,NULL);CHKERRQ(ierr);
316 
317   /*    A  = A + diag(Gu) */
318   ierr = VecDuplicate(globalin,&Gglobalin);CHKERRQ(ierr);
319   ierr = MatMult(appctx->SEMop.grad,globalin,Gglobalin);CHKERRQ(ierr);
320   ierr = MatDiagonalSet(A,Gglobalin,ADD_VALUES);CHKERRQ(ierr);
321   ierr = VecDestroy(&Gglobalin);CHKERRQ(ierr);
322 
323   /*   A  = K - A    */
324   ierr = MatScale(A,-1.0);CHKERRQ(ierr);
325   ierr = MatAXPY(A,0.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
326   PetscFunctionReturn(0);
327 }
328 
329 /* --------------------------------------------------------------------- */
330 
331 #include "petscblaslapack.h"
332 /*
333      Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
334 */
/*
   MatMult_Laplacian - matrix-free (MATSHELL) application of the
   mass-scaled Laplacian: y = M^{-1} K x, one dense element matrix
   applied per element via BLAS gemv.
*/
PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscErrorCode    ierr;
  PetscReal         **temp,vv;
  PetscInt          i,j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  ierr = MatShellGetContext(A,&appctx);CHKERRQ(ierr);
  /* scatter x into a local (ghosted) vector so each element sees all its nodes */
  ierr = DMGetLocalVector(appctx->da,&xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGetLocalVector(appctx->da,&ylocal);CHKERRQ(ierr);
  ierr = VecSet(ylocal,0.0);CHKERRQ(ierr);
  /* dense N x N element Laplacian, rebuilt (and destroyed) on every multiply */
  ierr = PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  for (i=0; i<appctx->param.N; i++) {
    /* viscosity and element-size (2/Le) scaling; sign makes the operator act as mu*u_xx */
    vv =-appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }
  ierr = DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(appctx->da,ylocal,&yl);CHKERRQ(ierr);
  ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(appctx->param.N,&n);CHKERRQ(ierr);
  /* one gemv per local element; neighboring elements share one node, hence stride N-1 */
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  ierr = DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(appctx->da,ylocal,&yl);CHKERRQ(ierr);
  ierr = PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  /* sum element contributions at shared nodes, then apply the inverse lumped mass */
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(appctx->da,&xlocal);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(appctx->da,&ylocal);CHKERRQ(ierr);
  ierr = VecPointwiseDivide(y,y,appctx->SEMop.mass);CHKERRQ(ierr);
  return 0;
}
376 
/*
   MatMult_Advection - matrix-free (MATSHELL) application of the
   mass-scaled gradient: y = -M^{-1} G x, one dense element matrix
   applied per element via BLAS gemv.
*/
PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscErrorCode    ierr;
  PetscReal         **temp;
  PetscInt          j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  ierr = MatShellGetContext(A,&appctx);CHKERRQ(ierr);
  /* scatter x into a local (ghosted) vector so each element sees all its nodes */
  ierr = DMGetLocalVector(appctx->da,&xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);CHKERRQ(ierr);
  ierr = DMGetLocalVector(appctx->da,&ylocal);CHKERRQ(ierr);
  ierr = VecSet(ylocal,0.0);CHKERRQ(ierr);
  /* dense N x N element advection matrix, rebuilt (and destroyed) on every multiply */
  ierr = PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(appctx->da,ylocal,&yl);CHKERRQ(ierr);
  ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
  ierr = PetscBLASIntCast(appctx->param.N,&n);CHKERRQ(ierr);
  /* one gemv per local element; neighboring elements share one node, hence stride N-1 */
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  ierr = DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(appctx->da,ylocal,&yl);CHKERRQ(ierr);
  ierr = PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  /* sum element contributions at shared nodes, then apply the inverse lumped mass */
  ierr = VecSet(y,0.0);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(appctx->da,&xlocal);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(appctx->da,&ylocal);CHKERRQ(ierr);
  ierr = VecPointwiseDivide(y,y,appctx->SEMop.mass);CHKERRQ(ierr);
  /* note the sign: the assembled operator is -G (advection moves to the RHS) */
  ierr = VecScale(y,-1.0);CHKERRQ(ierr);
  return 0;
}
415 
416 /*
417    RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
418    matrix for the Laplacian operator
419 
420    Input Parameters:
421    ts - the TS context
422    t - current time  (ignored)
423    X - current solution (ignored)
424    dummy - optional user-defined context, as set by TSetRHSJacobian()
425 
426    Output Parameters:
427    AA - Jacobian matrix
428    BB - optionally different matrix from which the preconditioner is built
429    str - flag indicating matrix structure
430 
431 */
432 PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
433 {
434   PetscReal      **temp;
435   PetscReal      vv;
436   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
437   PetscErrorCode ierr;
438   PetscInt       i,xs,xn,l,j;
439   PetscInt       *rowsDM;
440   PetscBool      flg = PETSC_FALSE;
441 
442   ierr = PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);CHKERRQ(ierr);
443 
444   if (!flg) {
445     /*
446      Creates the element stiffness matrix for the given gll
447      */
448     ierr = PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
449     /* workaround for clang analyzer warning: Division by zero */
450     if (appctx->param.N <= 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");
451 
452     /* scale by the size of the element */
453     for (i=0; i<appctx->param.N; i++) {
454       vv=-appctx->param.mu*2.0/appctx->param.Le;
455       for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
456     }
457 
458     ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
459     ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
460 
461     xs   = xs/(appctx->param.N-1);
462     xn   = xn/(appctx->param.N-1);
463 
464     ierr = PetscMalloc1(appctx->param.N,&rowsDM);CHKERRQ(ierr);
465     /*
466      loop over local elements
467      */
468     for (j=xs; j<xs+xn; j++) {
469       for (l=0; l<appctx->param.N; l++) {
470         rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
471       }
472       ierr = MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);CHKERRQ(ierr);
473     }
474     ierr = PetscFree(rowsDM);CHKERRQ(ierr);
475     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
476     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
477     ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
478     ierr = MatDiagonalScale(A,appctx->SEMop.mass,0);CHKERRQ(ierr);
479     ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
480 
481     ierr = PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
482   } else {
483     ierr = MatSetType(A,MATSHELL);CHKERRQ(ierr);
484     ierr = MatSetUp(A);CHKERRQ(ierr);
485     ierr = MatShellSetContext(A,appctx);CHKERRQ(ierr);
486     ierr = MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian);CHKERRQ(ierr);
487   }
488   return 0;
489 }
490 
491 /*
492    RHSMatrixAdvection - User-provided routine to compute the right-hand-side
493    matrix for the Advection (gradient) operator.
494 
495    Input Parameters:
496    ts - the TS context
497    t - current time
498    global_in - global input vector
499    dummy - optional user-defined context, as set by TSetRHSJacobian()
500 
501    Output Parameters:
502    AA - Jacobian matrix
503    BB - optionally different preconditioning matrix
504    str - flag indicating matrix structure
505 
506 */
507 PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
508 {
509   PetscReal      **temp;
510   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
511   PetscErrorCode ierr;
512   PetscInt       xs,xn,l,j;
513   PetscInt       *rowsDM;
514   PetscBool      flg = PETSC_FALSE;
515 
516   ierr = PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);CHKERRQ(ierr);
517 
518   if (!flg) {
519     /*
520      Creates the advection matrix for the given gll
521      */
522     ierr = PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
523     ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
524     ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
525     xs   = xs/(appctx->param.N-1);
526     xn   = xn/(appctx->param.N-1);
527 
528     ierr = PetscMalloc1(appctx->param.N,&rowsDM);CHKERRQ(ierr);
529     for (j=xs; j<xs+xn; j++) {
530       for (l=0; l<appctx->param.N; l++) {
531         rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
532       }
533       ierr = MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);CHKERRQ(ierr);
534     }
535     ierr = PetscFree(rowsDM);CHKERRQ(ierr);
536     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
537     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
538 
539     ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
540     ierr = MatDiagonalScale(A,appctx->SEMop.mass,0);CHKERRQ(ierr);
541     ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
542 
543     ierr = PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
544   } else {
545     ierr = MatSetType(A,MATSHELL);CHKERRQ(ierr);
546     ierr = MatSetUp(A);CHKERRQ(ierr);
547     ierr = MatShellSetContext(A,appctx);CHKERRQ(ierr);
548     ierr = MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection);CHKERRQ(ierr);
549   }
550   return 0;
551 }
552 
553 /*TEST
554 
555     build:
556       requires: !complex
557 
558     test:
559       suffix: 1
560       requires: !single
561 
562     test:
563       suffix: 2
564       nsize: 5
565       requires: !single
566 
567     test:
568       suffix: 3
569       requires: !single
570       args: -ts_view  -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error
571 
572     test:
573       suffix: 4
574       requires: !single
575       args: -ts_view  -ts_type beuler  -pc_type none -ts_max_steps 5 -ts_monitor_error
576 
577 TEST*/
578