static char help[] ="Solves a simple data assimilation problem with one dimensional advection diffusion equation using TSAdjoint\n\n";

/*

    Not yet tested in parallel

*/

/* ------------------------------------------------------------------------

   This program uses the one-dimensional advection-diffusion equation,
       u_t = mu*u_xx - a u_x,
   on the domain 0 <= x <= 1, with periodic boundary conditions

   to demonstrate solving a data assimilation problem of finding the initial conditions
   to produce a given solution at a fixed time.

   The operators are discretized with the spectral element method

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" so that we can use TS solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines    petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets             petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers                petscpc.h   - preconditioners
     petscksp.h    - linear solvers         petscsnes.h - nonlinear solvers
*/

#include <petsctao.h>
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

/* Quadrature/interpolation data for one Gauss-Lobatto-Legendre (GLL) element */
typedef struct {
  PetscInt  n;        /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

/* Discretization and physical parameters of the problem */
typedef struct {
  PetscInt  N;              /* grid points per element */
  PetscInt  E;              /* number of elements */
  PetscReal tol_L2,tol_max; /* error norms */
  PetscInt  steps;          /* number of timesteps */
  PetscReal Tend;           /* endtime */
  PetscReal mu;             /* viscosity */
  PetscReal a;              /* advection speed */
  PetscReal L;              /* total length of domain */
  PetscReal Le;             /* length of one element, L/E (set in main()) */
  PetscReal Tadj;           /* NOTE(review): apparently unused in this file -- confirm before removing */
} PetscParam;

/* Vectors used by the data-assimilation problem */
typedef struct {
  Vec reference;     /* desired end state */
  Vec grid;          /* total grid */
  Vec grad;          /* NOTE(review): apparently unused in this file -- confirm before removing */
  Vec ic;            /* initial conditions; the Tao optimization variable */
  Vec curr_sol;      /* working vector integrated forward in time by TSSolve() */
  Vec joe;           /* copy of the forward solution at the final time */
  Vec true_solution; /* actual initial conditions for the final solution */
} PetscData;

/* Spectral-element operators over the whole mesh */
typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix for total integration (stored as a vector; the SEM mass matrix is diagonal) */
  Mat      stiff;     /* stiffness matrix; after assembly in main() it also holds the advection term */
  Mat      advec;     /* advection matrix */
  Mat      keptstiff; /* saved copy of the combined operator, used as the constant RHS Jacobian */
  PetscGLL gll;
} PetscSEMOperators;

/* User-defined application context passed to every callback */
typedef struct {
  DM                da;                    /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;            /* initial timestep, restored before each forward solve */
  PetscReal         *solutioncoefficients; /* random coefficients of the analytic solution modes */
  PetscInt          ncoeff;                /* number of solution coefficients */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
extern PetscErrorCode RHSLaplacian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSAdvection(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode ComputeReference(TS,PetscReal,Vec,AppCtx*);
extern PetscErrorCode MonitorError(Tao,void*);
extern PetscErrorCode MonitorDestroy(void**);
extern PetscErrorCode ComputeSolutionCoefficients(AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  Tao          tao;
  Vec          u;      /* approximate solution vector */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBegin;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* initialize parameters (all overridable from the command line below) */
  appctx.param.N = 10; /* order of the spectral element */
  appctx.param.E = 8; /* number of elements */
  appctx.param.L = 1.0; /* length of the domain */
  appctx.param.mu = 0.00001; /* diffusion coefficient */
  appctx.param.a = 0.0; /* advection speed */
  appctx.initial_dt = 1e-4;
  appctx.param.steps = PETSC_MAX_INT; /* effectively unlimited; final time Tend controls the solve */
  appctx.param.Tend = 0.01;
  appctx.ncoeff = 2;

  PetscCall(PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-ncoeff",&appctx.ncoeff,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL));
  PetscCall(PetscOptionsGetReal(NULL,NULL,"-a",&appctx.param.a,NULL));
  appctx.param.Le = appctx.param.L/appctx.param.E;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  /* element endpoints are shared, so the periodic global grid has E*(N-1) distinct points */
  lenglob = appctx.param.E*(appctx.param.N-1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are E*(Nl-1)+1
     total grid values spread equally among all the processors, except first and last
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da,&u));
  PetscCall(VecDuplicate(u,&appctx.dat.ic));
  PetscCall(VecDuplicate(u,&appctx.dat.true_solution));
  PetscCall(VecDuplicate(u,&appctx.dat.reference));
  PetscCall(VecDuplicate(u,&appctx.SEMop.grid));
  PetscCall(VecDuplicate(u,&appctx.SEMop.mass));
  PetscCall(VecDuplicate(u,&appctx.dat.curr_sol));
  PetscCall(VecDuplicate(u,&appctx.dat.joe));

  PetscCall(DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL));
  PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  /* convert the local point range into a local element range */
  xs=xs/(appctx.param.N-1);
  xm=xm/(appctx.param.N-1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */

  for (i=xs; i<xs+xm; i++) {
    for (j=0; j<appctx.param.N-1; j++) {
      /* map GLL node j from the reference element [-1,1] into element i */
      x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
      ind=i*(appctx.param.N-1)+j;
      wrk_ptr1[ind]=x;
      wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
      /* first node of the element is shared with the previous element; its mass entry
         accumulates the weight contribution from both sides */
      if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.advec));

  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand-side
     as a time-dependent matrix.

     NOTE(review): appctx.ts has not been created yet at these two calls (TSCreate()
     happens below); the ts argument is not used by RHSLaplacian()/RHSAdvection(),
     but an uninitialized handle is being passed -- confirm, or create the TS first.
  */
  PetscCall(RHSLaplacian(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx));
  PetscCall(RHSAdvection(appctx.ts,0.0,u,appctx.SEMop.advec,appctx.SEMop.advec,&appctx));
  /* combine diffusion and advection into a single constant operator, and keep a copy */
  PetscCall(MatAXPY(appctx.SEMop.stiff,-1.0,appctx.SEMop.advec,DIFFERENT_NONZERO_PATTERN));
  PetscCall(MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff));

  /* attach the null space to the matrix, this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff,nsp));
  PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&appctx.ts));
  PetscCall(TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec, void *))ComputeReference,&appctx));
  PetscCall(TSSetProblemType(appctx.ts,TS_LINEAR));
  PetscCall(TSSetType(appctx.ts,TSRK));
  PetscCall(TSSetDM(appctx.ts,appctx.da));
  PetscCall(TSSetTime(appctx.ts,0.0));
  PetscCall(TSSetTimeStep(appctx.ts,appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts,appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts,appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL));
  PetscCall(TSSetFromOptions(appctx.ts));
  /* Need to save initial timestep user may have set with -ts_dt so it can be reset for each new TSSolve() */
  PetscCall(TSGetTimeStep(appctx.ts,&appctx.initial_dt));
  PetscCall(TSSetRHSFunction(appctx.ts,NULL,TSComputeRHSFunctionLinear,&appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,TSComputeRHSJacobianConstant,&appctx));
  /* Alternative nonlinear path using the callbacks defined below:
     PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx));
     PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx)); */

  /* Set random initial conditions as initial guess, compute analytic reference solution and analytic (true) initial conditions */
  PetscCall(ComputeSolutionCoefficients(&appctx));
  PetscCall(InitialConditions(appctx.dat.ic,&appctx));
  PetscCall(ComputeReference(appctx.ts,appctx.param.Tend,appctx.dat.reference,&appctx));
  PetscCall(ComputeReference(appctx.ts,0.0,appctx.dat.true_solution,&appctx));

  /* Set up to save trajectory before TSSetFromOptions() so that TSTrajectory options can be captured;
     NOTE(review): this is a deliberate second TSSetFromOptions() call -- presumably so the options
     registered by TSSetSaveTrajectory() get processed; confirm it is intended */
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));

  /* Create TAO solver and set desired solution method */
  PetscCall(TaoCreate(PETSC_COMM_WORLD,&tao));
  PetscCall(TaoSetMonitor(tao,MonitorError,&appctx,MonitorDestroy));
  PetscCall(TaoSetType(tao,TAOBQNLS));
  PetscCall(TaoSetSolution(tao,appctx.dat.ic));
  /* Set routine for function and gradient evaluation */
  PetscCall(TaoSetObjectiveAndGradient(tao,NULL,FormFunctionGradient,(void *)&appctx));
  /* Check for any TAO command line options */
  PetscCall(TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT));
  PetscCall(TaoSetFromOptions(tao));
  PetscCall(TaoSolve(tao));

  /* Free all PETSc objects in reverse order of creation */
  PetscCall(TaoDestroy(&tao));
  PetscCall(PetscFree(appctx.solutioncoefficients));
  PetscCall(MatDestroy(&appctx.SEMop.advec));
  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&appctx.dat.ic));
  PetscCall(VecDestroy(&appctx.dat.joe));
  PetscCall(VecDestroy(&appctx.dat.true_solution));
  PetscCall(VecDestroy(&appctx.dat.reference));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*
   Computes the (random, in [.9,1.0)) coefficients for the analytic solution to the PDE;
   stored in appctx->solutioncoefficients, freed by main()
*/
PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
{
  PetscRandom rand;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(appctx->ncoeff,&appctx->solutioncoefficients));
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rand));
  PetscCall(PetscRandomSetInterval(rand,.9,1.0));
  for (i=0; i<appctx->ncoeff; i++) {
    /* PetscReal passed where PetscScalar* is expected; valid because the build requires !complex */
    PetscCall(PetscRandomGetValue(rand,&appctx->solutioncoefficients[i]));
  }
  PetscCall(PetscRandomDestroy(&rand));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the (random) initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve()

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,j,lenglob;
  PetscReal         sum,val;
  PetscRandom       rand;

  PetscFunctionBegin;
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rand));
  PetscCall(PetscRandomSetInterval(rand,.9,1.0));
  PetscCall(DMDAVecGetArray(appctx->da,u,&s));
  PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  lenglob = appctx->param.E*(appctx->param.N-1);
  /* superpose ncoeff sine modes with fresh random amplitudes at every grid point */
  for (i=0; i<lenglob; i++) {
    s[i]= 0;
    for (j=0; j<appctx->ncoeff; j++) {
      PetscCall(PetscRandomGetValue(rand,&val));
      s[i] += val*PetscSinScalar(2*(j+1)*PETSC_PI*xg[i]);
    }
  }
  PetscCall(PetscRandomDestroy(&rand));
  PetscCall(DMDAVecRestoreArray(appctx->da,u,&s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
  PetscCall(VecSum(u,&sum));
  PetscCall(VecShift(u,-sum/lenglob));
  PetscFunctionReturn(0);
}

/*
   TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function.
   InitialConditions() computes the initial conditions for the beginning of the Tao iterations

   NOTE(review): this routine is not called anywhere in this file (the true initial
   conditions are obtained via ComputeReference(ts,0.0,...) in main()) -- confirm it is
   kept intentionally as reference code.

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,j,lenglob;
  PetscReal         sum;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da,u,&s));
  PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  lenglob = appctx->param.E*(appctx->param.N-1);
  /* superpose the analytic sine modes with the fixed random coefficients chosen in ComputeSolutionCoefficients() */
  for (i=0; i<lenglob; i++) {
    s[i]= 0;
    for (j=0; j<appctx->ncoeff; j++) {
      s[i] += appctx->solutioncoefficients[j]*PetscSinScalar(2*(j+1)*PETSC_PI*xg[i]);
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx->da,u,&s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
  PetscCall(VecSum(u,&sum));
  PetscCall(VecShift(u,-sum/lenglob));
  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
/*
   Sets the desired profile for the final end time: evaluates the analytic series
   solution at time t.  Each mode j decays with exp(-mu*(2*(j+1)*pi)^2 * t) and is
   advected through the shifted argument x + a*t.

   Input Parameters:
   t - final time
   obj - vector storing the desired profile
   appctx - user-defined application context

*/
PetscErrorCode ComputeReference(TS ts,PetscReal t,Vec obj,AppCtx *appctx)
{
  PetscScalar       *s,tc;
  const PetscScalar *xg;
  PetscInt          i, j,lenglob;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da,obj,&s));
  PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  lenglob = appctx->param.E*(appctx->param.N-1);
  for (i=0; i<lenglob; i++) {
    s[i]= 0;
    for (j=0; j<appctx->ncoeff; j++) {
      /* tc = -mu * (2*(j+1)*pi)^2 * t, the diffusive decay exponent of mode j */
      tc = -appctx->param.mu*(j+1)*(j+1)*4.0*PETSC_PI*PETSC_PI*t;
      s[i] += appctx->solutioncoefficients[j]*PetscSinScalar(2*(j+1)*PETSC_PI*(xg[i] + appctx->param.a*t))*PetscExpReal(tc);
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx->da,obj,&s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  PetscFunctionReturn(0);
}

/*
   RHSFunction - right-hand side for the (commented-out in main()) nonlinear TS path:
   applies the saved combined operator, globalout = keptstiff * globalin
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.keptstiff,globalin,globalout));
  PetscFunctionReturn(0);
}

/*
   RHSJacobian - Jacobian for the (commented-out in main()) nonlinear TS path:
   the operator is constant, so simply copy keptstiff into A
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  PetscFunctionBegin;
  PetscCall(MatCopy(appctx->SEMop.keptstiff,A,DIFFERENT_NONZERO_PATTERN));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */

/*
   RHSLaplacian - matrix for diffusion

   Input Parameters:
   ts - the TS context
   t - current time  (ignored)
   X - current solution  (ignored)
   dummy - optional user-defined context, as set by TSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
   str - flag indicating matrix structure

   Scales by the inverse of the mass matrix (perhaps that should be pulled out)

*/
PetscErrorCode RHSLaplacian(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element stiffness matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));

  /* scale the reference-element matrix by -mu * 2/Le (chain rule for the element mapping) */
  for (i=0; i<appctx->param.N; i++) {
    vv=-appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }

  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

  PetscCheck(appctx->param.N-1 >= 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Polynomial order must be at least 2");
  /* convert the local point range into a local element range */
  xs = xs/(appctx->param.N-1);
  xn = xn/(appctx->param.N-1);

  PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
  /*
     loop over local elements, stamping the dense N x N element matrix into the
     global matrix with local (ghosted) indices; shared element endpoints ADD together
  */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  /* left-scale by the inverse (diagonal) mass matrix, then restore the mass vector;
     the trailing 0 is a NULL right-scaling vector */
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscFunctionReturn(0);
}

/*
   Almost identical to Laplacian

   Note that the element matrix is NOT scaled by the size of element like the Laplacian term.
*/
PetscErrorCode RHSAdvection(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element stiffness matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));

  /* scale by the advection speed (note: NOT by the element size, see header comment) */
  for (i=0; i<appctx->param.N; i++) {
    vv = -appctx->param.a;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }

  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

  PetscCheck(appctx->param.N-1 >= 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Polynomial order must be at least 2");
  /* convert the local point range into a local element range */
  xs = xs/(appctx->param.N-1);
  xn = xn/(appctx->param.N-1);

  PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
  /*
     loop over local elements, stamping the dense element matrix with local indices
  */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  /* left-scale by the inverse (diagonal) mass matrix, then restore the mass vector */
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding
gradient. 561 562 Input Parameters: 563 tao - the Tao context 564 ic - the input vector 565 ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient() 566 567 Output Parameters: 568 f - the newly evaluated function 569 G - the newly evaluated gradient 570 571 Notes: 572 573 The forward equation is 574 M u_t = F(U) 575 which is converted to 576 u_t = M^{-1} F(u) 577 in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is 578 M^{-1} J 579 where J is the Jacobian of F. Now the adjoint equation is 580 M v_t = J^T v 581 but TSAdjoint does not solve this since it can only solve the transposed system for the 582 Jacobian the user provided. Hence TSAdjoint solves 583 w_t = J^T M^{-1} w (where w = M v) 584 since there is no way to indicate the mass matrix as a separate entity to TS. Thus one 585 must be careful in initializing the "adjoint equation" and using the result. This is 586 why 587 G = -2 M(u(T) - u_d) 588 below (instead of -2(u(T) - u_d) 589 590 */ 591 PetscErrorCode FormFunctionGradient(Tao tao,Vec ic,PetscReal *f,Vec G,void *ctx) 592 { 593 AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */ 594 Vec temp; 595 596 PetscFunctionBegin; 597 PetscCall(TSSetTime(appctx->ts,0.0)); 598 PetscCall(TSSetStepNumber(appctx->ts,0)); 599 PetscCall(TSSetTimeStep(appctx->ts,appctx->initial_dt)); 600 PetscCall(VecCopy(ic,appctx->dat.curr_sol)); 601 602 PetscCall(TSSolve(appctx->ts,appctx->dat.curr_sol)); 603 PetscCall(VecCopy(appctx->dat.curr_sol,appctx->dat.joe)); 604 605 /* Compute the difference between the current ODE solution and target ODE solution */ 606 PetscCall(VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.reference)); 607 608 /* Compute the objective/cost function */ 609 PetscCall(VecDuplicate(G,&temp)); 610 PetscCall(VecPointwiseMult(temp,G,G)); 611 PetscCall(VecDot(temp,appctx->SEMop.mass,f)); 612 PetscCall(VecDestroy(&temp)); 613 614 /* Compute initial conditions for the adjoint 
integration. See Notes above */ 615 PetscCall(VecScale(G, -2.0)); 616 PetscCall(VecPointwiseMult(G,G,appctx->SEMop.mass)); 617 PetscCall(TSSetCostGradients(appctx->ts,1,&G,NULL)); 618 619 PetscCall(TSAdjointSolve(appctx->ts)); 620 /* PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass));*/ 621 PetscFunctionReturn(0); 622 } 623 624 PetscErrorCode MonitorError(Tao tao,void *ctx) 625 { 626 AppCtx *appctx = (AppCtx*)ctx; 627 Vec temp,grad; 628 PetscReal nrm; 629 PetscInt its; 630 PetscReal fct,gnorm; 631 632 PetscFunctionBegin; 633 PetscCall(VecDuplicate(appctx->dat.ic,&temp)); 634 PetscCall(VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution)); 635 PetscCall(VecPointwiseMult(temp,temp,temp)); 636 PetscCall(VecDot(temp,appctx->SEMop.mass,&nrm)); 637 nrm = PetscSqrtReal(nrm); 638 PetscCall(TaoGetGradient(tao,&grad,NULL,NULL)); 639 PetscCall(VecPointwiseMult(temp,temp,temp)); 640 PetscCall(VecDot(temp,appctx->SEMop.mass,&gnorm)); 641 gnorm = PetscSqrtReal(gnorm); 642 PetscCall(VecDestroy(&temp)); 643 PetscCall(TaoGetIterationNumber(tao,&its)); 644 PetscCall(TaoGetSolutionStatus(tao,NULL,&fct,NULL,NULL,NULL,NULL)); 645 if (!its) { 646 PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%% Iteration Error Objective Gradient-norm\n")); 647 PetscCall(PetscPrintf(PETSC_COMM_WORLD,"history = [\n")); 648 } 649 PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%3D %g %g %g\n",its,(double)nrm,(double)fct,(double)gnorm)); 650 PetscFunctionReturn(0); 651 } 652 653 PetscErrorCode MonitorDestroy(void **ctx) 654 { 655 PetscFunctionBegin; 656 PetscCall(PetscPrintf(PETSC_COMM_WORLD,"];\n")); 657 PetscFunctionReturn(0); 658 } 659 660 /*TEST 661 662 build: 663 requires: !complex 664 665 test: 666 requires: !single 667 args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none 668 669 test: 670 suffix: cn 671 requires: !single 672 args: -ts_type cn -ts_dt .003 -pc_type lu -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none 673 674 test: 675 suffix: 2 676 requires: !single 
677 args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -a .1 -tao_bqnls_mat_lmvm_scale_type none 678 679 TEST*/ 680