static char help[] = "Time-dependent PDE in 2d. Simplified from ex7.c for illustrating how to use TS on a structured domain. \n";
/*
   Solves the 2-D heat equation on the unit square:

       u_t = uxx + uyy
       0 < x < 1, 0 < y < 1;

   Initial condition (a smooth bump centered at (.5,.5)):
       At t=0: u(x,y) = exp(c*r*r*r), if r=PetscSqrtReal((x-.5)*(x-.5) + (y-.5)*(y-.5)) < .125
               u(x,y) = 0.0           if r >= .125

   Boundary rows are handled inside RHSFunction/RHSJacobian: on the boundary
   the RHS is f = u (Jacobian row is the identity), which keeps the boundary
   values at their initial value of zero.

   Example runs:
   mpiexec -n 2 ./ex13 -da_grid_x 40 -da_grid_y 40 -ts_max_steps 2 -snes_monitor -ksp_monitor
   mpiexec -n 1 ./ex13 -snes_fd_color -ts_monitor_draw_solution
   mpiexec -n 2 ./ex13 -ts_type sundials -ts_monitor
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

/*
   User-defined data structures and routines
*/
typedef struct {
  PetscReal c; /* coefficient in the initial condition exp(c*r^3); set to -30.0 in main() */
} AppCtx;

extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode FormInitialSolution(DM, Vec, void *);

int main(int argc, char **argv) {
  TS        ts;        /* timestepping solver context */
  Vec       u, r;      /* solution, residual vector */
  Mat       J;         /* Jacobian matrix */
  PetscInt  steps;     /* number of time steps actually taken */
  DM        da;        /* distributed array managing the structured grid */
  PetscReal ftime, dt; /* final time, initial time step */
  AppCtx    user;      /* user-defined work context */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     (8x8 default grid, 1 dof per node, star stencil of width 1)
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 8, 8, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA;
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &u));
  PetscCall(VecDuplicate(u, &r));

  /* Initialize user application context */
  user.c = -30.0;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context (backward Euler by default;
     overridable on the command line via -ts_type)
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &user));

  /* Set Jacobian; the same matrix serves as J and the preconditioner Jpre.
     RHSJacobian does not use a context, hence the NULL ctx. */
  PetscCall(DMSetMatType(da, MATAIJ));
  PetscCall(DMCreateMatrix(da, &J));
  PetscCall(TSSetRHSJacobian(ts, J, J, RHSJacobian, NULL));

  ftime = 1.0;
  PetscCall(TSSetMaxTime(ts, ftime));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(FormInitialSolution(da, u, &user));
  dt = .01;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set runtime options (must come after all hard-wired settings so the
     command line can override them)
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the system and query the final time / step count
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(MatDestroy(&J));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&r));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   RHSFunction - Evaluates the right-hand side F(t,u) of the ODE system,
   i.e. the 5-point finite-difference Laplacian on interior nodes and
   f = u on boundary nodes.

   Input Parameters:
.  ts - the TS context
.  ftime - current time (unused here; the RHS is autonomous)
.  U - input vector (global)
.  ptr - optional user-defined context, as set by TSSetRHSFunction()

   Output Parameter:
.  F - function vector (global)
*/
PetscErrorCode RHSFunction(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr) {
  /* PETSC_UNUSED AppCtx *user=(AppCtx*)ptr; */
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   two = 2.0, hx, hy, sx, sy;
  PetscScalar u, uxx, uyy, **uarray, **f;
  Vec         localU; /* work vector holding U plus ghost points */

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  /* Grid spacing on [0,1]x[0,1]; sx, sy are the 1/h^2 stencil scalings */
  hx = 1.0 / (PetscReal)(Mx - 1);
  sx = 1.0 / (hx * hx);
  hy = 1.0 / (PetscReal)(My - 1);
  sy = 1.0 / (hy * hy);

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /* Get pointers to vector data (uarray is read-only; f is written) */
  PetscCall(DMDAVecGetArrayRead(da, localU, &uarray));
  PetscCall(DMDAVecGetArray(da, F, &f));

  /* Get local grid boundaries (this rank owns [xs,xs+xm) x [ys,ys+ym)) */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      if (i == 0 || j == 0 || i == Mx - 1 || j == My - 1) {
        /* physical boundary row: f = u */
        f[j][i] = uarray[j][i];
        continue;
      }
      /* interior: standard 5-point Laplacian */
      u       = uarray[j][i];
      uxx     = (-two * u + uarray[j][i - 1] + uarray[j][i + 1]) * sx;
      uyy     = (-two * u + uarray[j - 1][i] + uarray[j + 1][i]) * sy;
      f[j][i] = uxx + uyy;
    }
  }

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &uarray));
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localU));
  /* 11 flops per locally owned point (approximate; boundary rows do less) */
  PetscCall(PetscLogFlops(11.0 * ym * xm));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE. Interior rows hold
   the 5-point Laplacian stencil; boundary rows are the identity, matching
   RHSFunction.

   Input Parameters:
   ts - the TS context
   t - current time (unused; the Jacobian is constant in time)
   U - global input vector (unused; the operator is linear)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   J - Jacobian matrix
   Jpre - optionally different preconditioning matrix (values are
          assembled into Jpre; here main() passes the same Mat for both)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec U, Mat J, Mat Jpre, void *ctx) {
  DM            da;
  DMDALocalInfo info;
  PetscInt      i, j;
  PetscReal     hx, hy, sx, sy;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  /* Same grid spacing / stencil scaling as RHSFunction */
  hx = 1.0 / (PetscReal)(info.mx - 1);
  sx = 1.0 / (hx * hx);
  hy = 1.0 / (PetscReal)(info.my - 1);
  sy = 1.0 / (hy * hy);
  for (j = info.ys; j < info.ys + info.ym; j++) {
    for (i = info.xs; i < info.xs + info.xm; i++) {
      PetscInt    nc = 0; /* number of columns filled in for this row */
      MatStencil  row, col[5];
      PetscScalar val[5];
      row.i = i;
      row.j = j;
      if (i == 0 || j == 0 || i == info.mx - 1 || j == info.my - 1) {
        /* boundary row: identity (d f/d u = 1 on the diagonal) */
        col[nc].i = i;
        col[nc].j = j;
        val[nc++] = 1.0;
      } else {
        /* interior row: west, east, south, north neighbors then diagonal */
        col[nc].i = i - 1;
        col[nc].j = j;
        val[nc++] = sx;
        col[nc].i = i + 1;
        col[nc].j = j;
        val[nc++] = sx;
        col[nc].i = i;
        col[nc].j = j - 1;
        val[nc++] = sy;
        col[nc].i = i;
        col[nc].j = j + 1;
        val[nc++] = sy;
        col[nc].i = i;
        col[nc].j = j;
        val[nc++] = -2 * sx - 2 * sy;
      }
      PetscCall(MatSetValuesStencil(Jpre, 1, &row, nc, col, val, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(Jpre, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Jpre, MAT_FINAL_ASSEMBLY));
  /* If a separate operator matrix was supplied, assemble it too */
  if (J != Jpre) {
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
/*
   FormInitialSolution - Fills U with the initial bump described in the
   file header: u = exp(c*r^3) inside the disc of radius .125 centered at
   (.5,.5), and u = 0 elsewhere (including the whole boundary).

   Input Parameters:
   da  - the DMDA managing the grid
   ptr - AppCtx supplying the coefficient c

   Output Parameter:
   U - global solution vector
*/
PetscErrorCode FormInitialSolution(DM da, Vec U, void *ptr) {
  AppCtx       *user = (AppCtx *)ptr;
  PetscReal     c    = user->c;
  PetscInt      i, j, xs, ys, xm, ym, Mx, My;
  PetscScalar **u;
  PetscReal     hx, hy, x, y, r;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)(Mx - 1);
  hy = 1.0 / (PetscReal)(My - 1);

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      r = PetscSqrtReal((x - .5) * (x - .5) + (y - .5) * (y - .5));
      if (r < .125) u[j][i] = PetscExpReal(c * r * r * r);
      else u[j][i] = 0.0;
    }
  }

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      args: -ts_max_steps 5 -ts_monitor

   test:
      suffix: 2
      args: -ts_max_steps 5 -ts_monitor

   test:
      suffix: 3
      args: -ts_max_steps 5 -snes_fd_color -ts_monitor

TEST*/