static char help[] = "Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";

/*

    Not yet tested in parallel

*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional Burgers equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= L, with periodic boundary conditions

   The operators are discretized with the spectral element method

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
   used

   See src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */

#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt  N;               /* grid points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* end time */
  PetscReal mu;              /* viscosity */
  PetscReal L;               /* total length of domain */
  PetscReal Le;
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec grid; /* total grid */
  Vec curr_sol;
} PetscData;

typedef struct {
  Vec      grid;  /* total grid */
  Vec      mass;  /* mass matrix for total integration */
  Mat      stiff; /* stiffness matrix */
  Mat      keptstiff;
  Mat      grad;
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);

int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL points per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
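  /*
     Note on the discretization sizes: each of the E elements carries N GLL points, adjacent
     elements share their interface point, and the periodic boundary identifies the last grid
     point with the first, so the global problem assembled below has E*(N-1) unknowns and each
     element has length Le = L/E.
  */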
  appctx.param.Le = appctx.param.L / appctx.param.E;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E * (appctx.param.N - 1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are E*(N-1)
     total grid values spread equally among all the processors.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));

  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */

  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
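  /*
     Note on the mass vector assembled above: each entry holds the GLL quadrature weight
     (scaled by Le/2) of the corresponding grid point. A point shared by two elements is
     stored only once, so its weight must be counted twice; because the GLL weights are
     symmetric (weights[0] equals weights[N-1]), doubling the first weight of each element,
     as done in the j == 0 branch, supplies the missing contribution from the neighboring
     element. The periodic wrap-around is handled the same way through the DMDA.
  */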
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand-side
     as a time-dependent matrix.
  */
  PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));

  /* keep a copy of the stiffness matrix; stiff itself is overwritten by RHSJacobian() during the solve */
  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));

  /* attach the null space to the matrix, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* attach the null space to the matrix, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that integrates the ODE in time; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));

  /* Set initial conditions for the problem */
  PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));

  PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode (*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetStepNumber(appctx.ts, 0));
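  /*
     Because the exact solution was registered with TSSetSolutionFunction() above, options such as
     -ts_monitor_error (used in the tests at the bottom of this file) can report the difference
     between the computed and exact solutions at each step.
  */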
  PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));

  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(MatDestroy(&appctx.SEMop.grad));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*
   TrueSolution() computes the exact solution of the PDE

   Input Parameters:
   ts     - the TS context
   t      - current time
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) {
    s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  return 0;
}

/*
   RHSFunction() computes the right-hand side f(u) = K u - u .* (G u), where K is the viscous
   Laplacian operator stored in keptstiff and G is the gradient operator stored in grad.
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout));                   /* grad u */
  PetscCall(VecPointwiseMult(globalout, globalin, globalout));                   /* u grad u */
  PetscCall(VecScale(globalout, -1.0));                                          /* - u grad u */
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout)); /* K u - u grad u */
  PetscFunctionReturn(0);
}

/*
   K is the discretization of the Laplacian
   G is the discretization of the gradient

   Computes the Jacobian of K u - diag(u) G u, which is given by
       K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;
  Vec     Gglobalin;

  PetscFunctionBegin;
  /* A = diag(u) G */
  PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A, globalin, NULL));

  /* A = A + diag(Gu) */
  PetscCall(VecDuplicate(globalin, &Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
  PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /* A = K - A */
  PetscCall(MatScale(A, -1.0));
  PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */

#include "petscblaslapack.h"
/*
   Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
*/
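/*
   The two MatShell routines below are used when the -gll_mf option is given (see test 3 at the
   bottom of this file): instead of assembling the operators, RHSMatrixLaplaciangllDM() and
   RHSMatrixAdvectiongllDM() create shell matrices whose products apply the dense element
   operator to each element's nodal values with a BLAS gemv call and then sum the element
   contributions through the DMDA local-to-global communication.
*/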
PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp, vv;
  PetscInt           i, j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One = 1, n;
  PetscScalar        _DOne = 1;

  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) {
    PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  }
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  return 0;
}

PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp;
  PetscInt           j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One = 1, n;
  PetscScalar        _DOne = 1;

  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) {
    PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  }
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscCall(VecScale(y, -1.0));
  return 0;
}
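/*
   Note that both multiplication routines above, like the assembled operators below, divide
   (row-scale) the result by the diagonal mass vector built in main(), so the Mats represent
   the element operators premultiplied by the inverse of the diagonal GLL mass matrix rather
   than the weak-form matrices themselves.
*/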
/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts  - the TS context
   t   - current time (ignored)
   X   - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A  - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
       Creates the element stiffness matrix for the given gll
    */
    PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");

    /* scale by the size of the element */
    for (i = 0; i < appctx->param.N; i++) {
      vv = -appctx->param.mu * 2.0 / appctx->param.Le;
      for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
    }

    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    /*
       loop over local elements
    */
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) {
        /* the +1 skips the ghost point at the start of the ghosted local numbering used by MatSetValuesLocal() */
        rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      }
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian));
  }
  return 0;
}
/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the advection (gradient) operator.

   Input Parameters:
   ts  - the TS context
   t   - current time (ignored)
   X   - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A  - Jacobian matrix
   BB - optionally different preconditioning matrix
*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
       Creates the advection matrix for the given gll
    */
    PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) {
        rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      }
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection));
  }
  return 0;
}

/*TEST

    build:
      requires: !complex

    test:
      suffix: 1
      requires: !single

    test:
      suffix: 2
      nsize: 5
      requires: !single

    test:
      suffix: 3
      requires: !single
      args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

    test:
      suffix: 4
      requires: !single
      args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/