static char help[] = "Solves a one-dimensional Burgers' equation and compares with the exact solution\n\n";

/*
   Not yet tested in parallel
*/

/* ------------------------------------------------------------------------

   This program uses the one-dimensional Burgers' equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= 1, with periodic boundary conditions

   The operators are discretized with the spectral element method

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
   used

   See src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */

#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt n;         /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt N; /* grid points per element */
  PetscInt E; /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt steps;   /* number of timesteps */
  PetscReal Tend;   /* endtime */
  PetscReal mu;     /* viscosity */
  PetscReal L;      /* total length of domain */
  PetscReal Le;
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec grid; /* total grid */
  Vec curr_sol;
} PetscData;

typedef struct {
  Vec grid;  /* total grid */
  Vec mass;  /* mass matrix for total integration */
  Mat stiff; /* stiffness matrix */
  Mat keptstiff;
  Mat grad;
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam param;
  PetscData dat;
  TS ts;
  PetscReal initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);

int main(int argc, char **argv)
{
  AppCtx appctx; /* user-defined application context */
  PetscInt i, xs, xm, ind, j, lenglob;
  PetscReal x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  /* initialize parameters */
  appctx.param.N = 10;    /* order of the spectral element */
  appctx.param.E = 10;    /* number of elements */
  appctx.param.L = 4.0;   /* length of the domain */
  appctx.param.mu = 0.01; /* diffusion coefficient */
  appctx.initial_dt = 5e-3;
  appctx.param.steps = PETSC_INT_MAX;
  appctx.param.Tend = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
  appctx.param.Le = appctx.param.L / appctx.param.E;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob = appctx.param.E * (appctx.param.N - 1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. Because the boundary
     conditions are periodic, there are E*(N-1) total grid values, spread equally
     among all the processors
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));

  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */
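  /*
     Element i (i = xs .. xs+xm-1 on this process) owns the nodes ind = i*(N-1)+j, j = 0 .. N-2.
     Each reference GLL node is mapped to the physical coordinate x = (Le/2)*(node_j + 1) + Le*i,
     and the lumped mass entry is (Le/2)*weight_j.  The first node of an element coincides with
     the last node of the previous element, so at j == 0 the shared node also receives that
     neighboring contribution (the GLL weights are symmetric, so weights[0] equals weights[N-1]).
  */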

  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix.
  */
  PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));

  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));

  /* attach the null space to the matrix, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* attach the null space to the matrix, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));

  /* Set initial conditions for the problem */
  PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));

  PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode (*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetStepNumber(appctx.ts, 0));

  PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));

  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(MatDestroy(&appctx.SEMop.grad));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*
   TrueSolution() computes the exact solution of the PDE at time t; it is used both to set
   the initial condition and, through TSSetSolutionFunction(), to monitor the error

   Input Parameters:
   t - the time at which to evaluate the solution
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
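/*
   The closed-form solution evaluated below (see the reference cited in the header) is
       u(x,t) = 2*mu*pi*sin(pi*x)*exp(-mu*pi^2*t) / (2 + cos(pi*x)*exp(-mu*pi^2*t))
*/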
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar *s;
  const PetscScalar *xg;
  PetscInt i, xs, xn;

  PetscFunctionBeginUser;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) {
    s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

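/*
   RHSFunction - evaluates the right-hand side F(t,u) = K u - u .* (G u), where K is the
   diffusion operator assembled in RHSMatrixLaplaciangllDM() (kept in SEMop.keptstiff) and
   G is the gradient operator assembled in RHSMatrixAdvectiongllDM() (SEMop.grad)
*/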
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBeginUser;
  PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout)); /* grad u */
  PetscCall(VecPointwiseMult(globalout, globalin, globalout)); /* u grad u */
  PetscCall(VecScale(globalout, -1.0));
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*

   K is the discretization of the Laplacian
   G is the discretization of the gradient

   The right-hand side above is K u - diag(u) G u, so its Jacobian is
       K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;
  Vec Gglobalin;

  PetscFunctionBeginUser;
  /* A = diag(u) G */

  PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A, globalin, NULL));

  /* A = A + diag(Gu) */
  PetscCall(VecDuplicate(globalin, &Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
  PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /* A = K - A */
  PetscCall(MatScale(A, -1.0));
  PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscblaslapack.h>
/*
   Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
*/
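/*
   Both shell-matrix routines below apply their operator one element at a time: the global
   vector is scattered into a ghosted local vector, the dense N x N element matrix is applied
   with BLASgemv to each block of N element nodes, the element contributions are summed back
   into the global vector with ADD_VALUES, and the result is divided pointwise by the lumped
   mass vector.
*/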
PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
{
  AppCtx *appctx;
  PetscReal **temp, vv;
  PetscInt i, j, xs, xn;
  Vec xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar *yl;
  PetscBLASInt _One = 1, n;
  PetscScalar _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
{
  AppCtx *appctx;
  PetscReal **temp;
  PetscInt j, xs, xn;
  Vec xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar *yl;
  PetscBLASInt _One = 1, n;
  PetscScalar _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscCall(VecScale(y, -1.0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built

*/
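/*
   When assembled (the default), the matrix is built element-by-element from the dense GLL
   element Laplacian scaled by -mu*2/Le, and its rows are then scaled by the inverse of the
   lumped mass vector; with -gll_mf a MATSHELL that applies MatMult_Laplacian() matrix-free
   is used instead.
*/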
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt i, xs, xn, l, j;
  PetscInt *rowsDM;
  PetscBool flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
       Creates the element stiffness matrix for the given gll
    */
    PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");

    /* scale by the size of the element */
    for (i = 0; i < appctx->param.N; i++) {
      vv = -appctx->param.mu * 2.0 / appctx->param.Le;
      for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
    }

    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    /*
       loop over local elements
    */
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)MatMult_Laplacian));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the Advection (gradient) operator.

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different matrix used to construct the preconditioner

*/
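/*
   The assembly mirrors RHSMatrixLaplaciangllDM(): the dense GLL element advection (gradient)
   matrix is assembled element-by-element and row-scaled by the inverse lumped mass vector;
   with -gll_mf a MATSHELL that applies MatMult_Advection() matrix-free is used instead.
*/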
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt xs, xn, l, j;
  PetscInt *rowsDM;
  PetscBool flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
       Creates the advection matrix for the given gll
    */
    PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)MatMult_Advection));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
     requires: !complex

   test:
     suffix: 1
     requires: !single
     output_file: output/empty.out

   test:
     suffix: 2
     nsize: 5
     requires: !single
     output_file: output/empty.out

   test:
     suffix: 3
     requires: !single
     args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

   test:
     suffix: 4
     requires: !single
     args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/