xref: /petsc/src/ts/tutorials/ex50.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 
2 static char help[] ="Solves one dimensional Burger's equation compares with exact solution\n\n";
3 
4 /*
5 
6     Not yet tested in parallel
7 
8 */
9 /*
10    Concepts: TS^time-dependent nonlinear problems
   Concepts: TS^Burgers' equation
12    Processors: n
13 */
14 
15 /* ------------------------------------------------------------------------
16 
   This program uses the one-dimensional Burgers' equation
18        u_t = mu*u_xx - u u_x,
19    on the domain 0 <= x <= 1, with periodic boundary conditions
20 
21    The operators are discretized with the spectral element method
22 
23    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
24    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
25    used
26 
27    See src/tao/unconstrained/tutorials/burgers_spectral.c
28 
29   ------------------------------------------------------------------------- */
30 
31 #include <petscts.h>
32 #include <petscdt.h>
33 #include <petscdraw.h>
34 #include <petscdmda.h>
35 
36 /*
37    User-defined application context - contains data needed by the
38    application-provided call-back routines.
39 */
40 
typedef struct {
  PetscInt  n;                /* number of GLL nodes per element */
  PetscReal *nodes;           /* GLL quadrature nodes on the reference element [-1,1] */
  PetscReal *weights;         /* GLL quadrature weights */
} PetscGLL;

typedef struct {
  PetscInt    N;              /* grid points per element */
  PetscInt    E;              /* number of elements */
  PetscReal   tol_L2,tol_max; /* error norms */
  PetscInt    steps;          /* number of timesteps */
  PetscReal   Tend;           /* endtime */
  PetscReal   mu;             /* viscosity */
  PetscReal   L;              /* total length of domain */
  PetscReal   Le;             /* length of one element: L/E */
  PetscReal   Tadj;           /* NOTE(review): not used in this file -- presumably adjoint end time; confirm */
} PetscParam;

typedef struct {
  Vec         grid;              /* total grid */
  Vec         curr_sol;          /* current solution vector */
} PetscData;

typedef struct {
  Vec         grid;              /* total grid */
  Vec         mass;              /* diagonal mass matrix (stored as a vector) for total integration */
  Mat         stiff;             /* stiffness matrix */
  Mat         keptstiff;         /* pristine copy of the linear stiffness matrix, used by RHSFunction/RHSJacobian */
  Mat         grad;              /* gradient (advection) matrix */
  PetscGLL    gll;               /* GLL nodes and weights */
} PetscSEMOperators;

typedef struct {
  DM                da;                /* distributed array data structure */
  PetscSEMOperators SEMop;             /* spectral element operators */
  PetscParam        param;             /* problem parameters */
  PetscData         dat;               /* solution data */
  TS                ts;                /* timestepping solver */
  PetscReal         initial_dt;        /* initial time step */
} AppCtx;
81 
82 /*
83    User-defined routines
84 */
85 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
86 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
87 extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
88 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
89 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
90 
91 int main(int argc,char **argv)
92 {
93   AppCtx         appctx;                 /* user-defined application context */
94   PetscErrorCode ierr;
95   PetscInt       i, xs, xm, ind, j, lenglob;
96   PetscReal      x, *wrk_ptr1, *wrk_ptr2;
97   MatNullSpace   nsp;
98   PetscMPIInt    size;
99 
100    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101      Initialize program and set problem parameters
102      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
103   PetscFunctionBegin;
104 
105   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
106 
107   /*initialize parameters */
108   appctx.param.N    = 10;  /* order of the spectral element */
109   appctx.param.E    = 10;  /* number of elements */
110   appctx.param.L    = 4.0;  /* length of the domain */
111   appctx.param.mu   = 0.01; /* diffusion coefficient */
112   appctx.initial_dt = 5e-3;
113   appctx.param.steps = PETSC_MAX_INT;
114   appctx.param.Tend  = 4;
115 
116   CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL));
117   CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL));
118   CHKERRQ(PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL));
119   CHKERRQ(PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL));
120   appctx.param.Le = appctx.param.L/appctx.param.E;
121 
122   CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
123   PetscCheck((appctx.param.E % size) == 0,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes");
124 
125   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126      Create GLL data structures
127      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128   CHKERRQ(PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights));
129   CHKERRQ(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
130   appctx.SEMop.gll.n = appctx.param.N;
131   lenglob  = appctx.param.E*(appctx.param.N-1);
132 
133   /*
134      Create distributed array (DMDA) to manage parallel grid and vectors
135      and to set up the ghost point communication pattern.  There are E*(Nl-1)+1
136      total grid values spread equally among all the processors, except first and last
137   */
138 
139   CHKERRQ(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da));
140   CHKERRQ(DMSetFromOptions(appctx.da));
141   CHKERRQ(DMSetUp(appctx.da));
142 
143   /*
144      Extract global and local vectors from DMDA; we use these to store the
145      approximate solution.  Then duplicate these for remaining vectors that
146      have the same types.
147   */
148 
149   CHKERRQ(DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol));
150   CHKERRQ(VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid));
151   CHKERRQ(VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass));
152 
153   CHKERRQ(DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL));
154   CHKERRQ(DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
155   CHKERRQ(DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));
156 
157   /* Compute function over the locally owned part of the grid */
158 
159     xs=xs/(appctx.param.N-1);
160     xm=xm/(appctx.param.N-1);
161 
162   /*
163      Build total grid and mass over entire mesh (multi-elemental)
164   */
165 
166   for (i=xs; i<xs+xm; i++) {
167     for (j=0; j<appctx.param.N-1; j++) {
168       x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
169       ind=i*(appctx.param.N-1)+j;
170       wrk_ptr1[ind]=x;
171       wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
172       if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
173     }
174   }
175   CHKERRQ(DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
176   CHKERRQ(DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));
177 
178   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179    Create matrix data structure; set matrix evaluation routine.
180    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
181   CHKERRQ(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
182   CHKERRQ(DMCreateMatrix(appctx.da,&appctx.SEMop.stiff));
183   CHKERRQ(DMCreateMatrix(appctx.da,&appctx.SEMop.grad));
184   /*
185    For linear problems with a time-dependent f(u,t) in the equation
186    u_t = f(u,t), the user provides the discretized right-hand-side
187    as a time-dependent matrix.
188    */
189   CHKERRQ(RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx));
190   CHKERRQ(RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx));
191    /*
192        For linear problems with a time-dependent f(u,t) in the equation
193        u_t = f(u,t), the user provides the discretized right-hand-side
194        as a time-dependent matrix.
195     */
196 
197   CHKERRQ(MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff));
198 
199   /* attach the null space to the matrix, this probably is not needed but does no harm */
200   CHKERRQ(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp));
201   CHKERRQ(MatSetNullSpace(appctx.SEMop.stiff,nsp));
202   CHKERRQ(MatSetNullSpace(appctx.SEMop.keptstiff,nsp));
203   CHKERRQ(MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL));
204   CHKERRQ(MatNullSpaceDestroy(&nsp));
205   /* attach the null space to the matrix, this probably is not needed but does no harm */
206   CHKERRQ(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp));
207   CHKERRQ(MatSetNullSpace(appctx.SEMop.grad,nsp));
208   CHKERRQ(MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL));
209   CHKERRQ(MatNullSpaceDestroy(&nsp));
210 
211   /* Create the TS solver that solves the ODE and its adjoint; set its options */
212   CHKERRQ(TSCreate(PETSC_COMM_WORLD,&appctx.ts));
213   CHKERRQ(TSSetProblemType(appctx.ts,TS_NONLINEAR));
214   CHKERRQ(TSSetType(appctx.ts,TSRK));
215   CHKERRQ(TSSetDM(appctx.ts,appctx.da));
216   CHKERRQ(TSSetTime(appctx.ts,0.0));
217   CHKERRQ(TSSetTimeStep(appctx.ts,appctx.initial_dt));
218   CHKERRQ(TSSetMaxSteps(appctx.ts,appctx.param.steps));
219   CHKERRQ(TSSetMaxTime(appctx.ts,appctx.param.Tend));
220   CHKERRQ(TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP));
221   CHKERRQ(TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL));
222   CHKERRQ(TSSetSaveTrajectory(appctx.ts));
223   CHKERRQ(TSSetFromOptions(appctx.ts));
224   CHKERRQ(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx));
225   CHKERRQ(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx));
226 
227   /* Set Initial conditions for the problem  */
228   CHKERRQ(TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx));
229 
230   CHKERRQ(TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx));
231   CHKERRQ(TSSetTime(appctx.ts,0.0));
232   CHKERRQ(TSSetStepNumber(appctx.ts,0));
233 
234   CHKERRQ(TSSolve(appctx.ts,appctx.dat.curr_sol));
235 
236   CHKERRQ(MatDestroy(&appctx.SEMop.stiff));
237   CHKERRQ(MatDestroy(&appctx.SEMop.keptstiff));
238   CHKERRQ(MatDestroy(&appctx.SEMop.grad));
239   CHKERRQ(VecDestroy(&appctx.SEMop.grid));
240   CHKERRQ(VecDestroy(&appctx.SEMop.mass));
241   CHKERRQ(VecDestroy(&appctx.dat.curr_sol));
242   CHKERRQ(PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
243   CHKERRQ(DMDestroy(&appctx.da));
244   CHKERRQ(TSDestroy(&appctx.ts));
245 
246   /*
247      Always call PetscFinalize() before exiting a program.  This routine
248        - finalizes the PETSc libraries as well as MPI
249        - provides summary and diagnostic information if certain runtime
250          options are chosen (e.g., -log_summary).
251   */
252     ierr = PetscFinalize();
253     return ierr;
254 }
255 
256 /*
257    TrueSolution() computes the true solution for the PDE
258 
259    Input Parameter:
260    u - uninitialized solution vector (global)
261    appctx - user-defined application context
262 
263    Output Parameter:
264    u - vector with solution at initial time (global)
265 */
266 PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u,AppCtx *appctx)
267 {
268   PetscScalar       *s;
269   const PetscScalar *xg;
270   PetscInt          i,xs,xn;
271 
272   CHKERRQ(DMDAVecGetArray(appctx->da,u,&s));
273   CHKERRQ(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
274   CHKERRQ(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
275   for (i=xs; i<xs+xn; i++) {
276     s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
277   }
278   CHKERRQ(DMDAVecRestoreArray(appctx->da,u,&s));
279   CHKERRQ(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
280   return 0;
281 }
282 
283 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
284 {
285   AppCtx          *appctx = (AppCtx*)ctx;
286 
287   PetscFunctionBegin;
288   CHKERRQ(MatMult(appctx->SEMop.grad,globalin,globalout)); /* grad u */
289   CHKERRQ(VecPointwiseMult(globalout,globalin,globalout)); /* u grad u */
290   CHKERRQ(VecScale(globalout, -1.0));
291   CHKERRQ(MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout));
292   PetscFunctionReturn(0);
293 }
294 
295 /*
296 
297       K is the discretiziation of the Laplacian
298       G is the discretization of the gradient
299 
300       Computes Jacobian of      K u + diag(u) G u   which is given by
301               K   + diag(u)G + diag(Gu)
302 */
303 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
304 {
305   AppCtx         *appctx = (AppCtx*)ctx;
306   Vec            Gglobalin;
307 
308   PetscFunctionBegin;
309   /*    A = diag(u) G */
310 
311   CHKERRQ(MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN));
312   CHKERRQ(MatDiagonalScale(A,globalin,NULL));
313 
314   /*    A  = A + diag(Gu) */
315   CHKERRQ(VecDuplicate(globalin,&Gglobalin));
316   CHKERRQ(MatMult(appctx->SEMop.grad,globalin,Gglobalin));
317   CHKERRQ(MatDiagonalSet(A,Gglobalin,ADD_VALUES));
318   CHKERRQ(VecDestroy(&Gglobalin));
319 
320   /*   A  = K - A    */
321   CHKERRQ(MatScale(A,-1.0));
322   CHKERRQ(MatAXPY(A,0.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN));
323   PetscFunctionReturn(0);
324 }
325 
326 /* --------------------------------------------------------------------- */
327 
328 #include "petscblaslapack.h"
329 /*
330      Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
331 */
/*
   MatMult_Laplacian - shell-matrix application of the SEM discretization of the
   scaled Laplacian (the diffusion operator), including the inverse diagonal
   mass scaling.  Selected by -gll_mf.
*/
PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal         **temp,vv;
  PetscInt          i,j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  CHKERRQ(MatShellGetContext(A,&appctx));
  /* scatter ghost values of x onto this process so each local element is complete */
  CHKERRQ(DMGetLocalVector(appctx->da,&xlocal));
  CHKERRQ(DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal));
  CHKERRQ(DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal));
  CHKERRQ(DMGetLocalVector(appctx->da,&ylocal));
  CHKERRQ(VecSet(ylocal,0.0));
  /* dense N x N element Laplacian on the reference element (rebuilt each call) */
  CHKERRQ(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  /* scale by -mu and the 2/Le reference-to-physical element mapping factor */
  for (i=0; i<appctx->param.N; i++) {
    vv =-appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }
  CHKERRQ(DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl));
  CHKERRQ(DMDAVecGetArray(appctx->da,ylocal,&yl));
  CHKERRQ(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  CHKERRQ(PetscBLASIntCast(appctx->param.N,&n));
  /* apply the element matrix to each local element; the stride of N-1 reflects
     that neighboring elements share their endpoint node */
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  CHKERRQ(DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl));
  CHKERRQ(DMDAVecRestoreArray(appctx->da,ylocal,&yl));
  CHKERRQ(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  /* sum contributions of shared nodes across elements/processes into y */
  CHKERRQ(VecSet(y,0.0));
  CHKERRQ(DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y));
  CHKERRQ(DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y));
  CHKERRQ(DMRestoreLocalVector(appctx->da,&xlocal));
  CHKERRQ(DMRestoreLocalVector(appctx->da,&ylocal));
  /* apply the inverse of the diagonal mass matrix */
  CHKERRQ(VecPointwiseDivide(y,y,appctx->SEMop.mass));
  return 0;
}
372 
/*
   MatMult_Advection - shell-matrix application of the SEM gradient (advection)
   operator, including the inverse diagonal mass scaling and a final sign flip.
   Selected by -gll_mf.
*/
PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal         **temp;
  PetscInt          j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  CHKERRQ(MatShellGetContext(A,&appctx));
  /* scatter ghost values of x onto this process so each local element is complete */
  CHKERRQ(DMGetLocalVector(appctx->da,&xlocal));
  CHKERRQ(DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal));
  CHKERRQ(DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal));
  CHKERRQ(DMGetLocalVector(appctx->da,&ylocal));
  CHKERRQ(VecSet(ylocal,0.0));
  /* dense N x N element advection matrix on the reference element */
  CHKERRQ(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  CHKERRQ(DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl));
  CHKERRQ(DMDAVecGetArray(appctx->da,ylocal,&yl));
  CHKERRQ(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  CHKERRQ(PetscBLASIntCast(appctx->param.N,&n));
  /* apply the element matrix to each local element; stride N-1 because
     neighboring elements share their endpoint node */
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  CHKERRQ(DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl));
  CHKERRQ(DMDAVecRestoreArray(appctx->da,ylocal,&yl));
  CHKERRQ(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  /* sum contributions of shared nodes across elements/processes into y */
  CHKERRQ(VecSet(y,0.0));
  CHKERRQ(DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y));
  CHKERRQ(DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y));
  CHKERRQ(DMRestoreLocalVector(appctx->da,&xlocal));
  CHKERRQ(DMRestoreLocalVector(appctx->da,&ylocal));
  /* apply the inverse of the diagonal mass matrix */
  CHKERRQ(VecPointwiseDivide(y,y,appctx->SEMop.mass));
  /* flip the sign to match the assembled operator in RHSMatrixAdvectiongllDM */
  CHKERRQ(VecScale(y,-1.0));
  return 0;
}
410 
411 /*
412    RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
413    matrix for the Laplacian operator
414 
415    Input Parameters:
416    ts - the TS context
417    t - current time  (ignored)
418    X - current solution (ignored)
419    dummy - optional user-defined context, as set by TSetRHSJacobian()
420 
421    Output Parameters:
422    AA - Jacobian matrix
423    BB - optionally different matrix from which the preconditioner is built
424    str - flag indicating matrix structure
425 
426 */
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal      **temp;
  PetscReal      vv;
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt       i,xs,xn,l,j;
  PetscInt       *rowsDM;
  PetscBool      flg = PETSC_FALSE;

  /* -gll_mf selects a matrix-free (shell) operator instead of assembling A */
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL));

  if (!flg) {
    /*
     Creates the element stiffness matrix for the given gll
     */
    CHKERRQ(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");

    /* scale by the size of the element: -mu times the 2/Le mapping factor
       from the reference element to the physical element */
    for (i=0; i<appctx->param.N; i++) {
      vv=-appctx->param.mu*2.0/appctx->param.Le;
      for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
    }

    CHKERRQ(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
    CHKERRQ(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

    /* convert the grid-point range into an element range (shared endpoint nodes) */
    xs   = xs/(appctx->param.N-1);
    xn   = xn/(appctx->param.N-1);

    CHKERRQ(PetscMalloc1(appctx->param.N,&rowsDM));
    /*
     loop over local elements
     */
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        /* NOTE(review): the "1+" offset appears to map into the DMDA local
           (ghosted) numbering of the periodic grid -- confirm against the
           DMDA ghost layout */
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      /* ADD_VALUES accumulates shared-node entries from adjacent elements */
      CHKERRQ(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
    }
    CHKERRQ(PetscFree(rowsDM));
    CHKERRQ(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    CHKERRQ(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
    /* scale rows by the inverse mass matrix: temporarily invert, scale, restore */
    CHKERRQ(VecReciprocal(appctx->SEMop.mass));
    CHKERRQ(MatDiagonalScale(A,appctx->SEMop.mass,0));
    CHKERRQ(VecReciprocal(appctx->SEMop.mass));

    CHKERRQ(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  } else {
    /* matrix-free variant: A becomes a shell matrix applying MatMult_Laplacian */
    CHKERRQ(MatSetType(A,MATSHELL));
    CHKERRQ(MatSetUp(A));
    CHKERRQ(MatShellSetContext(A,appctx));
    CHKERRQ(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian));
  }
  return 0;
}
484 
485 /*
486    RHSMatrixAdvection - User-provided routine to compute the right-hand-side
487    matrix for the Advection (gradient) operator.
488 
489    Input Parameters:
490    ts - the TS context
491    t - current time
492    global_in - global input vector
493    dummy - optional user-defined context, as set by TSetRHSJacobian()
494 
495    Output Parameters:
496    AA - Jacobian matrix
497    BB - optionally different preconditioning matrix
498    str - flag indicating matrix structure
499 
500 */
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal      **temp;
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt       xs,xn,l,j;
  PetscInt       *rowsDM;
  PetscBool      flg = PETSC_FALSE;

  /* -gll_mf selects a matrix-free (shell) operator instead of assembling A */
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL));

  if (!flg) {
    /*
     Creates the advection matrix for the given gll
     */
    CHKERRQ(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
    CHKERRQ(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
    CHKERRQ(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
    /* convert the grid-point range into an element range (shared endpoint nodes) */
    xs   = xs/(appctx->param.N-1);
    xn   = xn/(appctx->param.N-1);

    CHKERRQ(PetscMalloc1(appctx->param.N,&rowsDM));
    /* loop over local elements; ADD_VALUES accumulates shared-node entries */
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        /* NOTE(review): the "1+" offset appears to map into the DMDA local
           (ghosted) numbering of the periodic grid -- confirm against the
           DMDA ghost layout */
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      CHKERRQ(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
    }
    CHKERRQ(PetscFree(rowsDM));
    CHKERRQ(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    CHKERRQ(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    /* scale rows by the inverse mass matrix: temporarily invert, scale, restore */
    CHKERRQ(VecReciprocal(appctx->SEMop.mass));
    CHKERRQ(MatDiagonalScale(A,appctx->SEMop.mass,0));
    CHKERRQ(VecReciprocal(appctx->SEMop.mass));

    CHKERRQ(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  } else {
    /* matrix-free variant: A becomes a shell matrix applying MatMult_Advection */
    CHKERRQ(MatSetType(A,MATSHELL));
    CHKERRQ(MatSetUp(A));
    CHKERRQ(MatShellSetContext(A,appctx));
    CHKERRQ(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection));
  }
  return 0;
}
545 
546 /*TEST
547 
548     build:
549       requires: !complex
550 
551     test:
552       suffix: 1
553       requires: !single
554 
555     test:
556       suffix: 2
557       nsize: 5
558       requires: !single
559 
560     test:
561       suffix: 3
562       requires: !single
563       args: -ts_view  -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error
564 
565     test:
566       suffix: 4
567       requires: !single
568       args: -ts_view  -ts_type beuler  -pc_type none -ts_max_steps 5 -ts_monitor_error
569 
570 TEST*/
571