1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n"; 2 3 /* 4 REQUIRES configuration of PETSc with option --download-adolc. 5 6 For documentation on ADOL-C, see 7 $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf 8 9 REQUIRES configuration of PETSc with option --download-colpack 10 11 For documentation on ColPack, see 12 $PETSC_ARCH/externalpackages/git.colpack/README.md 13 */ 14 /* ------------------------------------------------------------------------ 15 See ../advection-diffusion-reaction/ex5 for a description of the problem 16 ------------------------------------------------------------------------- */ 17 18 /* 19 Runtime options: 20 21 Solver: 22 -forwardonly - Run the forward simulation without adjoint. 23 -implicitform - Provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used. 24 -aijpc - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL). 25 26 Jacobian generation: 27 -jacobian_by_hand - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically. 28 -no_annotation - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.) 29 -adolc_sparse - Calculate Jacobian in compressed format, using ADOL-C sparse functionality. 30 -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option. 31 */ 32 /* 33 NOTE: If -adolc_sparse option is used, at least four processors should be used, so that no processor owns boundaries which are 34 identified by the periodic boundary conditions. The number of grid points in both x- and y-directions should be multiples 35 of 5, in order for the 5-point stencil to be cleanly parallelised. 
*/

#include <petscdmda.h>
#include <petscts.h>
#include "adolc-utils/drivers.cxx"
#include <adolc/adolc.h>

/* (Passive) field for the two variables */
typedef struct {
  PetscScalar u, v;
} Field;

/* Active field for the two variables (ADOL-C differentiable type) */
typedef struct {
  adouble u, v;
} AField;

/* Application context */
typedef struct {
  PetscReal D1, D2, gamma, kappa; /* diffusion and reaction coefficients used in the residual evaluations below */
  AField  **u_a, **f_a;           /* active working arrays */
  PetscBool aijpc;                /* if true, assemble an AIJ preconditioner matrix alongside the Jacobian */
  AdolcCtx *adctx;                /* automatic differentiation support */
} AppCtx;

extern PetscErrorCode InitialConditions(DM da, Vec U);
extern PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y);
extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr);
extern PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr);
extern PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
extern PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
extern PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx);
extern PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx);
extern PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx);
extern PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx);

/*
   Driver: sets up the DMDA, traces the residual with ADOL-C once (unless
   -no_annotation), optionally builds seed/recovery matrices for compressed
   (sparse) Jacobian computation, runs the forward solve and, unless
   -forwardonly, the adjoint solve.
*/
int main(int argc, char **argv) {
  TS        ts;
  Vec       x, r, xdot;
  DM        da;
  AppCtx    appctx;
  AdolcCtx *adctx;
  Vec       lambda[1];
  PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_FALSE, byhand = PETSC_FALSE;
  PetscInt  gxm, gym, i, dofs = 2, ctrl[3] = {0, 0, 0};
  PetscScalar **Seed = NULL, **Rec = NULL, *u_vec;
  unsigned int **JP = NULL;
  ISColoring     iscoloring;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscNew(&adctx));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE, adctx->no_an = PETSC_FALSE, adctx->sparse = PETSC_FALSE, adctx->sparse_view = PETSC_FALSE;
  adctx->sparse_view_done = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-no_annotation", &adctx->no_an, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-jacobian_by_hand", &byhand, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse", &adctx->sparse, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse_view", &adctx->sparse_view, NULL));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.adctx = adctx;

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
  */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /*
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
  */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));
  PetscCall(VecDuplicate(x, &xdot));

  /*
     Create timestepping solver context
  */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSCN));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  if (!implicitform) {
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionPassive, &appctx));
  } else {
    PetscCall(DMDATSSetIFunctionLocal(da, INSERT_VALUES, (DMDATSIFunctionLocal)IFunctionLocalPassive, &appctx));
  }

  if (!adctx->no_an) {
    PetscCall(DMDAGetGhostCorners(da, NULL, NULL, NULL, &gxm, &gym, NULL));
    adctx->m = dofs * gxm * gym;
    adctx->n = dofs * gxm * gym; /* Number of dependent and independent variables (ghosted local size) */

    /*
       Trace function(s) just once; the tape is re-evaluated by the Jacobian
       drivers at each timestep.
    */
    if (!implicitform) {
      PetscCall(RHSFunctionActive(ts, 1.0, x, r, &appctx));
    } else {
      PetscCall(IFunctionActive(ts, 1.0, x, xdot, r, &appctx));
    }

    /*
       In the case where ADOL-C generates the Jacobian in compressed format,
       seed and recovery matrices are required. Since the sparsity structure
       of the Jacobian does not change over the course of the time
       integration, we can save computational effort by only generating
       these objects once.
    */
    if (adctx->sparse) {
      /*
         Generate sparsity pattern

         One way to do this is to use the Jacobian pattern driver `jac_pat`
         provided by ColPack. Otherwise, it is the responsibility of the user
         to obtain the sparsity pattern.
      */
      PetscCall(PetscMalloc1(adctx->n, &u_vec));
      JP = (unsigned int **)malloc(adctx->m * sizeof(unsigned int *));
      jac_pat(1, adctx->m, adctx->n, u_vec, JP, ctrl);
      /* NOTE(review): PETSC_COMM_WORLD is used everywhere else in this file;
         MPI_COMM_WORLD here is inconsistent — confirm intended communicator. */
      if (adctx->sparse_view) { PetscCall(PrintSparsity(MPI_COMM_WORLD, adctx->m, JP)); }

      /*
         Extract a column colouring

         For problems using DMDA, colourings can be extracted directly, as
         follows. There are also ColPack drivers available. Otherwise, it is the
         responsibility of the user to obtain a suitable colouring.
      */
      PetscCall(DMCreateColoring(da, IS_COLORING_LOCAL, &iscoloring));
      PetscCall(ISColoringGetIS(iscoloring, PETSC_USE_POINTER, &adctx->p, NULL));

      /* Generate seed matrix to propagate through the forward mode of AD */
      PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
      PetscCall(GenerateSeedMatrix(iscoloring, Seed));
      PetscCall(ISColoringDestroy(&iscoloring));

      /*
         Generate recovery matrix, which is used to recover the Jacobian from
         compressed format */
      PetscCall(AdolcMalloc2(adctx->m, adctx->p, &Rec));
      PetscCall(GetRecoveryMatrix(Seed, JP, adctx->m, adctx->p, Rec));

      /* Store results and free workspace */
      adctx->Rec = Rec;
      for (i = 0; i < adctx->m; i++) free(JP[i]);
      free(JP);
      PetscCall(PetscFree(u_vec));

    } else {
      /*
         In 'full' Jacobian mode, propagate an identity matrix through the
         forward mode of AD.
      */
      adctx->p = adctx->n;
      PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
      PetscCall(Identity(adctx->n, Seed));
    }
    adctx->Seed = Seed;
  }

  /*
     Set Jacobian: hand-coded vs ADOL-C-generated, RHS (explicit) vs
     IJacobian (implicit) form.
  */
  if (!implicitform) {
    if (!byhand) {
      PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianAdolc, &appctx));
    } else {
      PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianByHand, &appctx));
    }
  } else {
    if (appctx.aijpc) {
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      if (!byhand) {
        PetscCall(TSSetIJacobian(ts, A, B, IJacobianAdolc, &appctx));
      } else {
        PetscCall(TSSetIJacobian(ts, A, B, IJacobianByHand, &appctx));
      }
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      if (!byhand) {
        PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianAdolc, &appctx));
      } else {
        PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianByHand, &appctx));
      }
    }
  }

  /*
     Set initial conditions
  */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /*
     Have the TS save its trajectory so that TSAdjointSolve() may be used
     and set solver options
  */
  if (!forwardonly) {
    PetscCall(TSSetSaveTrajectory(ts));
    PetscCall(TSSetMaxTime(ts, 200.0));
    PetscCall(TSSetTimeStep(ts, 0.5));
  } else {
    PetscCall(TSSetMaxTime(ts, 2000.0));
    PetscCall(TSSetTimeStep(ts, 10));
  }
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /*
     Solve ODE system
  */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /*
       Start the Adjoint model
    */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /* Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }

  /*
     Free work space.
  */
  PetscCall(VecDestroy(&xdot));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  if (!adctx->no_an) {
    if (adctx->sparse) PetscCall(AdolcFree2(Rec));
    PetscCall(AdolcFree2(Seed));
  }
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFree(adctx));
  PetscCall(PetscFinalize());
  return 0;
}

/*
   Set the initial field: v is a bump supported on [1,1.5]x[1,1.5] of the
   2.5x2.5 domain, and u = 1 - 2v elsewhere/everywhere.
*/
PetscErrorCode InitialConditions(DM da, Vec U) {
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

/*
   Seed the adjoint variable: place a unit value in both components at the
   single grid vertex nearest the physical point (x,y); all other entries
   of lambda are left untouched.
*/
PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y) {
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;
  PetscFunctionBegin;

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(0);
}

/*
   Implicit-form residual F = Udot - G(U) for the reaction-diffusion system,
   evaluated with passive (plain PetscScalar) arithmetic. Called through
   DMDATSSetIFunctionLocal, so ghost exchange is handled by the caller.
*/
PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr) {
  AppCtx     *appctx = (AppCtx *)ptr;
  PetscInt    i, j, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy;

  PetscFunctionBegin;
  hx = 2.50 / (PetscReal)(info->mx);
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)(info->my);
  sy = 1.0 / (hy * hy);

  /* Get local grid boundaries */
  xs = info->xs;
  xm = info->xm;
  ys = info->ys;
  ym = info->ym;

  /* Compute function over the locally owned part of the grid:
     5-point Laplacian plus Gray-Scott style reaction terms */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      uxx = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
      uyy = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vxx = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
      vyy = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
      f[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }
  PetscCall(PetscLogFlops(16.0 * xm * ym));
  PetscFunctionReturn(0);
}

/*
   Same residual as IFunctionLocalPassive, but evaluated with ADOL-C active
   variables inside trace_on/trace_off so that the computation is recorded
   on tape 1 for later Jacobian evaluation.
*/
PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr) {
  AppCtx       *appctx = (AppCtx *)ptr;
  DM            da;
  DMDALocalInfo info;
  Field       **u, **f, **udot;
  Vec           localU;
  PetscInt      i, j, xs, ys, xm, ym, gxs, gys, gxm, gym;
  PetscReal     hx, hy, sx, sy;
  adouble       uc, uxx, uyy, vc, vxx, vyy;
  AField      **f_a, *f_c, **u_a, *u_c;
  PetscScalar   dummy;

  PetscFunctionBegin;

  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetLocalVector(da, &localU));
  hx  = 2.50 / (PetscReal)(info.mx);
  sx  = 1.0 / (hx * hx);
  hy  = 2.50 / (PetscReal)(info.my);
  sy  = 1.0 / (hy * hy);
  xs  = info.xs;
  xm  = info.xm;
  gxs = info.gxs;
  gxm = info.gxm;
  ys  = info.ys;
  ym  = info.ym;
  gys = info.gys;
  gym = info.gym;

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, F, &f));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));

  /*
     Create contiguous 1-arrays of AFields

     NOTE: Memory for ADOL-C active variables (such as adouble and AField)
           cannot be allocated using PetscMalloc, as this does not call the
           relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm * info.gym];
  f_c = new AField[info.gxm * info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[info.gym];
  f_a = new AField *[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  trace_on(1); /* Start of active section on tape 1 */

  /*
     Mark independence

     NOTE: Ghost points are marked as independent, in place of the points they represent on
           other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u_a[j][i].u;
      uxx = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc  = u_a[j][i].v;
      vxx = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }

  /*
     Mark dependence

     NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
           the Jacobian later. Ghosted rows that are not locally owned go to `dummy`.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      if ((i < xs) || (i >= xs + xm) || (j < ys) || (j >= ys + ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off(); /* End of active section */
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));

  PetscCall(DMRestoreLocalVector(da, &localU));

  /* Destroy AFields appropriately: undo the ghost offset applied by
     GiveGhostPoints before deleting the row-pointer arrays */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(0);
}

/*
   Explicit-form RHS G(U) for the reaction-diffusion system, evaluated with
   passive arithmetic. Uses a local F vector and ADD_VALUES gather because
   of the periodic boundaries (see NOTE (2) below).
*/
PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr) {
  AppCtx     *appctx = (AppCtx *)ptr;
  DM          da;
  PetscInt    i, j, xs, ys, xm, ym,
    Mx, My;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy;
  Field     **u, **f;
  Vec         localU, localF;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)(Mx);
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)(My);
  sy = 1.0 / (hy * hy);
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMGetLocalVector(da, &localF));

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
  PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
  PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
  PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, localF, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      uxx = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
      uyy = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vxx = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
      vyy = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
      f[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
      f[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
    }
  }

  /*
     Gather global vector, using the 2-step process
     DMLocalToGlobalBegin(), DMLocalToGlobalEnd().

     NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
               DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  */
  PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
  PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, localF, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localF));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(PetscLogFlops(16.0 * xm * ym));
  PetscFunctionReturn(0);
}

/*
   Active (ADOL-C traced) version of RHSFunctionPassive: records the RHS
   evaluation on tape 1 for later Jacobian computation.
*/
PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr) {
  AppCtx   *appctx = (AppCtx *)ptr;
  DM        da;
  PetscInt  i, j, xs, ys, xm, ym, gxs, gys, gxm, gym, Mx, My;
  PetscReal hx, hy, sx, sy;
  AField  **f_a, *f_c, **u_a, *u_c;
  adouble   uc, uxx, uyy, vc, vxx, vyy;
  Field   **u, **f;
  Vec       localU, localF;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)(Mx);
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)(My);
  sy = 1.0 / (hy * hy);
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMGetLocalVector(da, &localF));

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
  PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
  PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
  PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, localF, &f));

  /*
     Get local and ghosted grid boundaries
  */
  PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, NULL, &gxm, &gym, NULL));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Create contiguous 1-arrays of AFields

     NOTE: Memory for ADOL-C active variables (such as adouble and AField)
           cannot be allocated using PetscMalloc, as this does not call the
           relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[gxm * gym];
  f_c = new AField[gxm * gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[gym];
  f_a = new AField *[gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  /*
     Compute function over the locally owned part of the grid
  */
  trace_on(1); // ----------------------------------------------- Start of active section

  /*
     Mark independence

     NOTE: Ghost points are marked as independent, in place of the points they represent on
           other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u_a[j][i].u;
      uxx = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc  = u_a[j][i].v;
      vxx = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
      f_a[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
    }
  }
  /*
     Mark dependence

     NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
           during Jacobian assembly.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      f_a[j][i].u >>= f[j][i].u;
      f_a[j][i].v >>= f[j][i].v;
    }
  }
  trace_off(); // ----------------------------------------------- End of active section

  /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
  // if (appctx->adctx->zos) {
  //   PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
  // }

  /*
     Gather global vector, using the 2-step process
     DMLocalToGlobalBegin(), DMLocalToGlobalEnd().

     NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
               DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  */
  PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
  PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, localF, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localF));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Destroy AFields appropriately: undo the ghost offset applied by
     GiveGhostPoints before deleting the row-pointer arrays */
  f_a += gys;
  u_a += gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(0);
}

/*
   Implicit-form Jacobian computed by ADOL-C: re-evaluates tape 1 at the
   current (ghosted) state and assembles A, including the shift `a` from
   the mass (Udot) term.
*/
PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx) {
  AppCtx            *appctx = (AppCtx *)ctx;
  DM                 da;
  const PetscScalar *u_vec;
  Vec                localU;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /* Get pointers to vector data */
  PetscCall(VecGetArrayRead(localU, &u_vec));

  /*
     Compute Jacobian
  */
  PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1, A, u_vec, a, appctx->adctx));

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(localU, &u_vec));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscFunctionReturn(0);
}

/*
   Hand-coded implicit-form Jacobian (shifted): dF/dU + a*dF/dUdot for the
   residual of IFunctionLocalPassive, assembled with a 6-entry stencil per
   row (5-point Laplacian couplings plus the cross-variable reaction term).
*/
PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx) {
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, vc;
  Field     **u;
  Vec         localU;
  MatStencil  stencil[6], rowstencil;
  PetscScalar entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Single 2d level: all stencil entries live at k = 0 */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    stencil[0].j = j - 1;
    stencil[1].j = j + 1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; /* NOTE(review): redundant — already set above, never changed */
    rowstencil.j = j;
    for (i = xs; i < xs + xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
         uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

         vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
         vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
         f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      /* Row for the u-equation: 4 diffusion neighbours, the diagonal
         (including the shift `a` from the Udot term), and coupling to v */
      stencil[0].i = i;
      stencil[0].c = 0;
      entries[0]   = -appctx->D1 * sy;
      stencil[1].i = i;
      stencil[1].c = 0;
      entries[1]   = -appctx->D1 * sy;
      stencil[2].i = i - 1;
      stencil[2].c = 0;
      entries[2]   = -appctx->D1 * sx;
      stencil[3].i = i + 1;
      stencil[3].c = 0;
      entries[3]   = -appctx->D1 * sx;
      stencil[4].i = i;
      stencil[4].c = 0;
      entries[4]   = 2.0 * appctx->D1 * (sx + sy) + vc * vc + appctx->gamma + a;
      stencil[5].i = i;
      stencil[5].c = 1;
      entries[5]   = 2.0 * uc * vc;
      rowstencil.i = i;
      rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      if (appctx->aijpc) { PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES)); }
      /* Row for the v-equation: reuse stencil positions, switch components */
      stencil[0].c = 1;
      entries[0]   = -appctx->D2 * sy;
      stencil[1].c = 1;
      entries[1]   = -appctx->D2 * sy;
      stencil[2].c = 1;
      entries[2]   = -appctx->D2 * sx;
      stencil[3].c = 1;
      entries[3]   = -appctx->D2 * sx;
      stencil[4].c = 1;
      entries[4]   = 2.0 * appctx->D2 * (sx + sy) - 2.0 * uc * vc + appctx->gamma + appctx->kappa + a;
      stencil[5].c = 0;
      entries[5]   = -vc * vc;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      if (appctx->aijpc) { PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES)); }
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19.0 * xm * ym));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  if (appctx->aijpc) {
    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  }
  PetscFunctionReturn(0);
}

/*
   Hand-coded explicit-form (RHS) Jacobian; definition continues beyond
   this chunk.
*/
PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx) {
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, vc;
  Field     **u;
  Vec         localU;
  MatStencil  stencil[6], rowstencil;
  PetscScalar entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx
= 2.50 / (PetscReal)(Mx); 927 sx = 1.0 / (hx * hx); 928 hy = 2.50 / (PetscReal)(My); 929 sy = 1.0 / (hy * hy); 930 931 /* 932 Scatter ghost points to local vector,using the 2-step process 933 DMGlobalToLocalBegin(),DMGlobalToLocalEnd(). 934 By placing code between these two statements, computations can be 935 done while messages are in transition. 936 */ 937 PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU)); 938 PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU)); 939 940 /* 941 Get pointers to vector data 942 */ 943 PetscCall(DMDAVecGetArrayRead(da, localU, &u)); 944 945 /* 946 Get local grid boundaries 947 */ 948 PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL)); 949 950 stencil[0].k = 0; 951 stencil[1].k = 0; 952 stencil[2].k = 0; 953 stencil[3].k = 0; 954 stencil[4].k = 0; 955 stencil[5].k = 0; 956 rowstencil.k = 0; 957 958 /* 959 Compute function over the locally owned part of the grid 960 */ 961 for (j = ys; j < ys + ym; j++) { 962 stencil[0].j = j - 1; 963 stencil[1].j = j + 1; 964 stencil[2].j = j; 965 stencil[3].j = j; 966 stencil[4].j = j; 967 stencil[5].j = j; 968 rowstencil.k = 0; 969 rowstencil.j = j; 970 for (i = xs; i < xs + xm; i++) { 971 uc = u[j][i].u; 972 vc = u[j][i].v; 973 974 /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx; 975 uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy; 976 977 vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx; 978 vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy; 979 f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/ 980 981 stencil[0].i = i; 982 stencil[0].c = 0; 983 entries[0] = appctx->D1 * sy; 984 stencil[1].i = i; 985 stencil[1].c = 0; 986 entries[1] = appctx->D1 * sy; 987 stencil[2].i = i - 1; 988 stencil[2].c = 0; 989 entries[2] = appctx->D1 * sx; 990 stencil[3].i = i + 1; 991 stencil[3].c = 0; 992 entries[3] = appctx->D1 * sx; 993 stencil[4].i = i; 994 stencil[4].c = 0; 995 entries[4] = -2.0 * appctx->D1 * (sx + sy) - vc * vc - appctx->gamma; 996 stencil[5].i 
= i; 997 stencil[5].c = 1; 998 entries[5] = -2.0 * uc * vc; 999 rowstencil.i = i; 1000 rowstencil.c = 0; 1001 1002 PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES)); 1003 1004 stencil[0].c = 1; 1005 entries[0] = appctx->D2 * sy; 1006 stencil[1].c = 1; 1007 entries[1] = appctx->D2 * sy; 1008 stencil[2].c = 1; 1009 entries[2] = appctx->D2 * sx; 1010 stencil[3].c = 1; 1011 entries[3] = appctx->D2 * sx; 1012 stencil[4].c = 1; 1013 entries[4] = -2.0 * appctx->D2 * (sx + sy) + 2.0 * uc * vc - appctx->gamma - appctx->kappa; 1014 stencil[5].c = 0; 1015 entries[5] = vc * vc; 1016 rowstencil.c = 1; 1017 1018 PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES)); 1019 /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */ 1020 } 1021 } 1022 1023 /* 1024 Restore vectors 1025 */ 1026 PetscCall(PetscLogFlops(19.0 * xm * ym)); 1027 PetscCall(DMDAVecRestoreArrayRead(da, localU, &u)); 1028 PetscCall(DMRestoreLocalVector(da, &localU)); 1029 PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY)); 1030 PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); 1031 PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE)); 1032 if (appctx->aijpc) { 1033 PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); 1034 PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); 1035 PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE)); 1036 } 1037 PetscFunctionReturn(0); 1038 } 1039 1040 PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx) { 1041 AppCtx *appctx = (AppCtx *)ctx; 1042 DM da; 1043 PetscScalar *u_vec; 1044 Vec localU; 1045 1046 PetscFunctionBegin; 1047 PetscCall(TSGetDM(ts, &da)); 1048 PetscCall(DMGetLocalVector(da, &localU)); 1049 1050 /* 1051 Scatter ghost points to local vector,using the 2-step process 1052 DMGlobalToLocalBegin(),DMGlobalToLocalEnd(). 
1053 By placing code between these two statements, computations can be 1054 done while messages are in transition. 1055 */ 1056 PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU)); 1057 PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU)); 1058 1059 /* Get pointers to vector data */ 1060 PetscCall(VecGetArray(localU, &u_vec)); 1061 1062 /* 1063 Compute Jacobian 1064 */ 1065 PetscCall(PetscAdolcComputeRHSJacobianLocal(1, A, u_vec, appctx->adctx)); 1066 1067 /* 1068 Restore vectors 1069 */ 1070 PetscCall(VecRestoreArray(localU, &u_vec)); 1071 PetscCall(DMRestoreLocalVector(da, &localU)); 1072 PetscFunctionReturn(0); 1073 } 1074 1075 /*TEST 1076 1077 build: 1078 requires: double !complex adolc colpack 1079 1080 test: 1081 suffix: 1 1082 nsize: 1 1083 args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian 1084 output_file: output/adr_ex5adj_1.out 1085 1086 test: 1087 suffix: 2 1088 nsize: 1 1089 args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform 1090 output_file: output/adr_ex5adj_2.out 1091 1092 test: 1093 suffix: 3 1094 nsize: 4 1095 args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor 1096 output_file: output/adr_ex5adj_3.out 1097 1098 test: 1099 suffix: 4 1100 nsize: 4 1101 args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform 1102 output_file: output/adr_ex5adj_4.out 1103 1104 testset: 1105 suffix: 5 1106 nsize: 4 1107 args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse 1108 output_file: output/adr_ex5adj_5.out 1109 1110 testset: 1111 suffix: 6 1112 nsize: 4 1113 args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform 1114 output_file: output/adr_ex5adj_6.out 1115 1116 TEST*/ 1117