
static char help[] = "Solves the one-dimensional Burgers' equation and compares with the exact solution\n\n";

/*

    Not yet tested in parallel

*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional Burgers' equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= L (L = 4 by default), with periodic boundary conditions

   The operators are discretized with the spectral element method

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact
   solution used

   See src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */
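
/*
   For reference, the exact solution evaluated in TrueSolution() below (used as the initial
   condition and, via TSSetSolutionFunction(), for error monitoring) is

       u(x,t) = 2*mu*pi*sin(pi*x)*exp(-mu*pi^2*t) / (2 + cos(pi*x)*exp(-mu*pi^2*t))

   This is read off directly from the code; see the paper cited above for its derivation.
*/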

#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt  N;              /* number of grid points per element */
  PetscInt  E;              /* number of elements */
  PetscReal tol_L2,tol_max; /* error norms */
  PetscInt  steps;          /* number of timesteps */
  PetscReal Tend;           /* endtime */
  PetscReal mu;             /* viscosity */
  PetscReal L;              /* total length of domain */
  PetscReal Le;             /* length of one element */
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec grid;     /* total grid */
  Vec curr_sol; /* current solution */
} PetscData;

typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* diagonal (lumped) mass matrix for total integration */
  Mat      stiff;     /* stiffness matrix */
  Mat      keptstiff; /* copy of the stiffness matrix kept for the right-hand-side evaluation */
  Mat      grad;      /* gradient (advection) matrix */
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBegin;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL points per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;

  PetscCall(PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL));
  appctx.param.Le = appctx.param.L/appctx.param.E; /* length of one element */

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCheck((appctx.param.E % size) == 0,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob = appctx.param.E*(appctx.param.N-1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  Because of periodicity there are
     E*(N-1) total grid values spread equally among all the processors
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol));
  PetscCall(VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid));
  PetscCall(VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass));

  PetscCall(DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL));
  PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  /* convert the locally owned node range into an element range */
  xs = xs/(appctx.param.N-1);
  xm = xm/(appctx.param.N-1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */

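  /*
     Each element contributes N GLL nodes, but the last node of one element coincides with the
     first node of the next (and, by periodicity, the last node of the mesh with the first), so
     only indices 0..N-2 are stored per element.  The quadrature weight of a shared node therefore
     receives contributions from both neighboring elements; since the GLL weights are symmetric
     (weights[0] == weights[N-1]), this is what the extra term added for j == 0 below accounts for.
  */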
  for (i=xs; i<xs+xm; i++) {
    for (j=0; j<appctx.param.N-1; j++) {
      x   = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
      ind = i*(appctx.param.N-1)+j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
      if (j==0) wrk_ptr2[ind] += .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   Create matrix data structure; set matrix evaluation routine.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.grad));
  /*
   Assemble the constant-in-time stiffness (Laplacian) and gradient matrices once here; the TS
   argument is not used by these routines.
   */
  PetscCall(RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx));
  PetscCall(RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx));

  /* keep a copy of the assembled Laplacian; appctx.SEMop.stiff is handed to TSSetRHSJacobian() below and is overwritten each time RHSJacobian() is called */
  PetscCall(MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff));

  /* attach the (constant) null space to the stiffness matrices, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff,nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff,nsp));
  PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* attach the (constant) null space to the gradient matrix, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.grad,nsp));
  PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&appctx.ts));
  PetscCall(TSSetProblemType(appctx.ts,TS_NONLINEAR));
  PetscCall(TSSetType(appctx.ts,TSRK));
  PetscCall(TSSetDM(appctx.ts,appctx.da));
  PetscCall(TSSetTime(appctx.ts,0.0));
  PetscCall(TSSetTimeStep(appctx.ts,appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts,appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts,appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL));
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx));

  /* Set initial conditions for the problem */
  PetscCall(TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx));

  PetscCall(TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx));
  PetscCall(TSSetTime(appctx.ts,0.0));
  PetscCall(TSSetStepNumber(appctx.ts,0));

  PetscCall(TSSolve(appctx.ts,appctx.dat.curr_sol));

  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(MatDestroy(&appctx.SEMop.grad));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*
   TrueSolution() computes the exact solution of the PDE at time t; it provides the initial
   condition at t = 0 and is also registered with TSSetSolutionFunction() for error monitoring

   Input Parameters:
   ts - the TS context (unused)
   t  - the time at which to evaluate the solution
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i,xs,xn;

  PetscCall(DMDAVecGetArray(appctx->da,u,&s));
  PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  for (i=xs; i<xs+xn; i++) {
    s[i] = 2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da,u,&s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  return 0;
}

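/*
   RHSFunction - evaluates the right-hand side f(u) = K u - u .* (G u), where K (keptstiff) is the
   mass-scaled Laplacian assembled in RHSMatrixLaplaciangllDM(), G (grad) is the gradient assembled
   in RHSMatrixAdvectiongllDM(), and .* denotes pointwise multiplication
*/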
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.grad,globalin,globalout));  /* grad u */
  PetscCall(VecPointwiseMult(globalout,globalin,globalout));  /* u grad u */
  PetscCall(VecScale(globalout, -1.0));                       /* -u grad u */
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout)); /* K u - u grad u */
  PetscFunctionReturn(0);
}

/*
      K is the discretization of the Laplacian
      G is the discretization of the gradient

      Computes the Jacobian of      K u - diag(u) G u    which is given by
              K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A,Mat B,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;
  Vec     Gglobalin;

  PetscFunctionBegin;
  /*    A = diag(u) G */
  PetscCall(MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A,globalin,NULL));

  /*    A = A + diag(Gu) */
  PetscCall(VecDuplicate(globalin,&Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad,globalin,Gglobalin));
  PetscCall(MatDiagonalSet(A,Gglobalin,ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /*    A = K - A */
  PetscCall(MatScale(A,-1.0));
  PetscCall(MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */

#include "petscblaslapack.h"
/*
     Matrix-free application of the 1d Laplacian and gradient operators for GLL spectral elements
*/
PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp,vv;
  PetscInt           i,j,xs,xn;
  Vec                xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One = 1,n;
  PetscScalar        _DOne = 1;

  PetscCall(MatShellGetContext(A,&appctx));
  PetscCall(DMGetLocalVector(appctx->da,&xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal));
  PetscCall(DMGetLocalVector(appctx->da,&ylocal));
  PetscCall(VecSet(ylocal,0.0));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  /* scale the reference-element Laplacian by the viscosity and the element size */
  for (i=0; i<appctx->param.N; i++) {
    vv = -appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
  }
  PetscCall(DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl));
  PetscCall(DMDAVecGetArray(appctx->da,ylocal,&yl));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N,&n));
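  /*
     Apply the dense N x N element matrix to each locally owned element with BLAS gemv; adjacent
     elements share a node, hence the stride of N-1, and contributions accumulate into yl (beta = 1)
  */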
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da,ylocal,&yl));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscCall(VecSet(y,0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y));
  PetscCall(DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y));
  PetscCall(DMRestoreLocalVector(appctx->da,&xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da,&ylocal));
  PetscCall(VecPointwiseDivide(y,y,appctx->SEMop.mass));
  return 0;
}

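/*
   Matrix-free application of the advection (gradient) operator; mirrors MatMult_Laplacian() above,
   but uses the GLL element advection matrix and flips the overall sign at the end
*/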
PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp;
  PetscInt           j,xs,xn;
  Vec                xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One = 1,n;
  PetscScalar        _DOne = 1;

  PetscCall(MatShellGetContext(A,&appctx));
  PetscCall(DMGetLocalVector(appctx->da,&xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal));
  PetscCall(DMGetLocalVector(appctx->da,&ylocal));
  PetscCall(VecSet(ylocal,0.0));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscCall(DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl));
  PetscCall(DMDAVecGetArray(appctx->da,ylocal,&yl));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N,&n));
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da,ylocal,&yl));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscCall(VecSet(y,0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y));
  PetscCall(DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y));
  PetscCall(DMRestoreLocalVector(appctx->da,&xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da,&ylocal));
  PetscCall(VecPointwiseDivide(y,y,appctx->SEMop.mass));
  PetscCall(VecScale(y,-1.0));
  return 0;
}

/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context (ignored)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - the assembled matrix
   BB - optionally different matrix from which the preconditioner is built (unused here)

*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt    i,xs,xn,l,j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL));

  if (!flg) {
    /*
     Creates the element stiffness matrix for the given gll
     */
    PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");

    /* scale by the viscosity and the size of the element */
    for (i=0; i<appctx->param.N; i++) {
      vv = -appctx->param.mu*2.0/appctx->param.Le;
      for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
    }

    PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

    xs = xs/(appctx->param.N-1);
    xn = xn/(appctx->param.N-1);

    PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
    /*
     loop over local elements
     */
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
    /* scale the rows by the inverse of the lumped mass matrix */
    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  } else {
    PetscCall(MatSetType(A,MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A,appctx));
    PetscCall(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian));
  }
  return 0;
}

/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the advection (gradient) operator.

   Input Parameters:
   ts - the TS context (ignored)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - the assembled matrix
   BB - optionally different preconditioning matrix (unused here)

*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  AppCtx     *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt    xs,xn,l,j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL));

  if (!flg) {
    /*
     Creates the advection matrix for the given gll
     */
    PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
    PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
    xs = xs/(appctx->param.N-1);
    xn = xn/(appctx->param.N-1);

    PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  } else {
    PetscCall(MatSetType(A,MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A,appctx));
    PetscCall(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection));
  }
  return 0;
}

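/*
   Example usage (options assembled from those registered above and used in the tests below; the
   particular numbers are only illustrative):

       mpiexec -n 5 ./ex50 -E 10 -N 8 -Tend 2 -mu 0.01 -ts_monitor_error

   runs with 10 elements of 8 GLL points each on 5 MPI ranks and reports the error against
   TrueSolution() at each step; adding -gll_mf applies the operators matrix-free via MATSHELL.
*/
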
/*TEST

    build:
      requires: !complex

    test:
      suffix: 1
      requires: !single

    test:
      suffix: 2
      nsize: 5
      requires: !single

    test:
      suffix: 3
      requires: !single
      args: -ts_view  -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

    test:
      suffix: 4
      requires: !single
      args: -ts_view  -ts_type beuler  -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/
565