xref: /petsc/src/ts/tests/ex24.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 static char help[] = "Test TSComputeIJacobian()\n\n";
2 
3 #include <petscsys.h>
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 #include <petscts.h>
7 
/* Per-grid-point unknowns: the two species concentrations of the
   reaction-diffusion system (interleaved with dof = 2 in the DMDA). */
typedef struct {
  PetscScalar u, v;
} Field;

/* User parameters: D1, D2 are the diffusion coefficients of u and v;
   gamma and kappa are the reaction (feed/kill) rates. */
typedef struct {
  PetscReal D1, D2, gamma, kappa;
} AppCtx;
15 
/*
   RHSJacobian - Assembles the Jacobian of the reaction-diffusion right-hand side
   (the commented-out residual expressions below match the Gray-Scott model:
     f.u = D1*(uxx+uyy) - u*v^2 + gamma*(1-u)
     f.v = D2*(vxx+vyy) + u*v^2 - (gamma+kappa)*v )
   on a periodic 2.5 x 2.5 DMDA grid with a 5-point star stencil.

   Input:  ts  - the timestepper (supplies the DMDA)
           t   - current time (unused; the RHS is autonomous)
           U   - current global state
           ctx - AppCtx with D1, D2, gamma, kappa
   Output: A   - assembled Jacobian
   Note:   BB (the preconditioner matrix slot) is never written; in this test
           the same Mat is passed for both, so only A is assembled.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec U, Mat A, Mat BB, void *ctx) {
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, vc;
  Field     **u;
  Vec         localU;
  MatStencil  stencil[6], rowstencil; /* 6 columns per row: 4 neighbors + diagonal + cross-species coupling */
  PetscScalar entries[6];

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  /* Grid spacings and the 1/h^2 factors of the second-difference stencils */
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* 2D problem: the z index of every stencil entry is fixed at 0 */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;
  /*
     Compute function over the locally owned part of the grid.
     Column layout per row:
       [0] south (j-1), [1] north (j+1), [2] west (i-1), [3] east (i+1),
       [4] diagonal (i,j, same component), [5] cross-coupling (i,j, other component)
  */
  for (j = ys; j < ys + ym; j++) {
    stencil[0].j = j - 1;
    stencil[1].j = j + 1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; /* redundant: already set above, harmless */
    rowstencil.j = j;
    for (i = xs; i < xs + xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      /* Row for the u-component (c = 0): d(f.u)/d(u-neighbors) and d(f.u)/dv */
      stencil[0].i = i;
      stencil[0].c = 0;
      entries[0]   = appctx->D1 * sy;
      stencil[1].i = i;
      stencil[1].c = 0;
      entries[1]   = appctx->D1 * sy;
      stencil[2].i = i - 1;
      stencil[2].c = 0;
      entries[2]   = appctx->D1 * sx;
      stencil[3].i = i + 1;
      stencil[3].c = 0;
      entries[3]   = appctx->D1 * sx;
      stencil[4].i = i;
      stencil[4].c = 0;
      entries[4]   = -2.0 * appctx->D1 * (sx + sy) - vc * vc - appctx->gamma; /* d(f.u)/du at (i,j) */
      stencil[5].i = i;
      stencil[5].c = 1;
      entries[5]   = -2.0 * uc * vc; /* d(f.u)/dv = d(-u v^2)/dv */
      rowstencil.i = i;
      rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      /* Row for the v-component (c = 1): reuse the same grid offsets, flip components */
      stencil[0].c = 1;
      entries[0]   = appctx->D2 * sy;
      stencil[1].c = 1;
      entries[1]   = appctx->D2 * sy;
      stencil[2].c = 1;
      entries[2]   = appctx->D2 * sx;
      stencil[3].c = 1;
      entries[3]   = appctx->D2 * sx;
      stencil[4].c = 1;
      entries[4]   = -2.0 * appctx->D2 * (sx + sy) + 2.0 * uc * vc - appctx->gamma - appctx->kappa; /* d(f.v)/dv at (i,j) */
      stencil[5].c = 0;
      entries[5]   = vc * vc; /* d(f.v)/du = d(u v^2)/du */
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19 * xm * ym));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(0);
}
138 
139 PetscErrorCode InitialConditions(DM da, Vec U) {
140   PetscInt  i, j, xs, ys, xm, ym, Mx, My;
141   Field   **u;
142   PetscReal hx, hy, x, y;
143 
144   PetscFunctionBeginUser;
145   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
146 
147   hx = 2.5 / (PetscReal)Mx;
148   hy = 2.5 / (PetscReal)My;
149 
150   /*
151      Get pointers to vector data
152   */
153   PetscCall(DMDAVecGetArray(da, U, &u));
154 
155   /*
156      Get local grid boundaries
157   */
158   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
159 
160   /*
161      Compute function over the locally owned part of the grid
162   */
163   for (j = ys; j < ys + ym; j++) {
164     y = j * hy;
165     for (i = xs; i < xs + xm; i++) {
166       x = i * hx;
167       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
168       else u[j][i].v = 0.0;
169 
170       u[j][i].u = 1.0 - 2.0 * u[j][i].v;
171     }
172   }
173 
174   /*
175      Restore vectors
176   */
177   PetscCall(DMDAVecRestoreArray(da, U, &u));
178   PetscFunctionReturn(0);
179 }
180 
181 int main(int argc, char **argv) {
182   TS        ts;
183   Vec       U, Udot;
184   Mat       Jac, Jac2;
185   DM        da;
186   AppCtx    appctx;
187   PetscReal t = 0, shift, norm;
188 
189   PetscFunctionBeginUser;
190   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
191 
192   appctx.D1    = 8.0e-5;
193   appctx.D2    = 4.0e-5;
194   appctx.gamma = .024;
195   appctx.kappa = .06;
196 
197   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198      Create distributed array (DMDA) to manage parallel grid and vectors
199   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
200   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
201   PetscCall(DMSetFromOptions(da));
202   PetscCall(DMSetUp(da));
203   PetscCall(DMDASetFieldName(da, 0, "u"));
204   PetscCall(DMDASetFieldName(da, 1, "v"));
205   PetscCall(DMSetMatType(da, MATAIJ));
206   PetscCall(DMCreateMatrix(da, &Jac));
207   PetscCall(DMCreateMatrix(da, &Jac2));
208 
209   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
210      Extract global vectors from DMDA; then duplicate for remaining
211      vectors that are the same types
212    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
213   PetscCall(DMCreateGlobalVector(da, &U));
214   PetscCall(VecDuplicate(U, &Udot));
215   PetscCall(VecSet(Udot, 0.0));
216 
217   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
218      Create timestepping solver context
219      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
220   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
221   PetscCall(TSSetDM(ts, da));
222   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
223   PetscCall(TSSetRHSJacobian(ts, Jac, Jac, RHSJacobian, &appctx));
224 
225   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
226      Set initial conditions
227    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
228   PetscCall(InitialConditions(da, U));
229   PetscCall(TSSetSolution(ts, U));
230   PetscCall(TSSetFromOptions(ts));
231   PetscCall(TSSetUp(ts));
232 
233   shift = 2.;
234   PetscCall(TSComputeIJacobian(ts, t, U, Udot, shift, Jac2, Jac2, PETSC_FALSE));
235   shift = 1.;
236   PetscCall(TSComputeIJacobian(ts, t, U, Udot, shift, Jac, Jac, PETSC_FALSE));
237   shift = 2.;
238   PetscCall(TSComputeIJacobian(ts, t, U, Udot, shift, Jac, Jac, PETSC_FALSE));
239   PetscCall(MatAXPY(Jac, -1, Jac2, SAME_NONZERO_PATTERN));
240   PetscCall(MatNorm(Jac, NORM_INFINITY, &norm));
241   if (norm > 100.0 * PETSC_MACHINE_EPSILON) { PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error Norm %g \n Incorrect behaviour of TSComputeIJacobian(). The two matrices should have the same results.\n", (double)norm)); }
242   PetscCall(MatDestroy(&Jac));
243   PetscCall(MatDestroy(&Jac2));
244   PetscCall(VecDestroy(&U));
245   PetscCall(VecDestroy(&Udot));
246   PetscCall(TSDestroy(&ts));
247   PetscCall(DMDestroy(&da));
248   PetscCall(PetscFinalize());
249   return 0;
250 }
251 
252 /*TEST
253   test:
254 
255 TEST*/
256