xref: /petsc/src/ts/tests/ex24.c (revision efbe7e8a80d07327753dbe0b33efee01e046af3f)
1 static char help[] = "Test TSComputeIJacobian()\n\n";
2 
3 #include <petscsys.h>
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 #include <petscts.h>
7 
/* Per-grid-point unknowns; DMDA is created with dof=2, so this struct's
   layout (u first, then v) must match the field ordering used below. */
typedef struct {
  PetscScalar u,v;
} Field;
11 
/* User-defined application context: Gray-Scott reaction-diffusion parameters.
   D1, D2 are the diffusion coefficients of u and v; gamma is the feed rate;
   kappa is the kill rate. */
typedef struct {
  PetscReal D1,D2,gamma,kappa;
} AppCtx;
15 
16 
17 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
18 {
19   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
20   DM             da;
21   PetscErrorCode ierr;
22   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
23   PetscReal      hx,hy,sx,sy;
24   PetscScalar    uc,vc;
25   Field          **u;
26   Vec            localU;
27   MatStencil     stencil[6],rowstencil;
28   PetscScalar    entries[6];
29 
30   PetscFunctionBegin;
31   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
32   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
33   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
34 
35   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
36   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
37 
38   /*
39      Scatter ghost points to local vector,using the 2-step process
40         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
41      By placing code between these two statements, computations can be
42      done while messages are in transition.
43   */
44   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
45   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
46 
47   /*
48      Get pointers to vector data
49   */
50   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
51 
52   /*
53      Get local grid boundaries
54   */
55   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
56 
57   stencil[0].k = 0;
58   stencil[1].k = 0;
59   stencil[2].k = 0;
60   stencil[3].k = 0;
61   stencil[4].k = 0;
62   stencil[5].k = 0;
63   rowstencil.k = 0;
64   /*
65      Compute function over the locally owned part of the grid
66   */
67   for (j=ys; j<ys+ym; j++) {
68 
69     stencil[0].j = j-1;
70     stencil[1].j = j+1;
71     stencil[2].j = j;
72     stencil[3].j = j;
73     stencil[4].j = j;
74     stencil[5].j = j;
75     rowstencil.k = 0; rowstencil.j = j;
76     for (i=xs; i<xs+xm; i++) {
77       uc = u[j][i].u;
78       vc = u[j][i].v;
79 
80       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
81       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
82 
83       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
84       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
85        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
86 
87       stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
88       stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
89       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
90       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
91       stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
92       stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
93       rowstencil.i = i; rowstencil.c = 0;
94 
95       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
96       stencil[0].c = 1; entries[0] = appctx->D2*sy;
97       stencil[1].c = 1; entries[1] = appctx->D2*sy;
98       stencil[2].c = 1; entries[2] = appctx->D2*sx;
99       stencil[3].c = 1; entries[3] = appctx->D2*sx;
100       stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
101       stencil[5].c = 0; entries[5] = vc*vc;
102       rowstencil.c = 1;
103 
104       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
105       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
106     }
107   }
108 
109   /*
110      Restore vectors
111   */
112   ierr = PetscLogFlops(19*xm*ym);CHKERRQ(ierr);
113   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
114   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
115   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
116   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
117   ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
118   PetscFunctionReturn(0);
119 }
120 
121 PetscErrorCode InitialConditions(DM da,Vec U)
122 {
123   PetscErrorCode ierr;
124   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
125   Field          **u;
126   PetscReal      hx,hy,x,y;
127 
128   PetscFunctionBegin;
129   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
130 
131   hx = 2.5/(PetscReal)Mx;
132   hy = 2.5/(PetscReal)My;
133 
134   /*
135      Get pointers to vector data
136   */
137   ierr = DMDAVecGetArray(da,U,&u);CHKERRQ(ierr);
138 
139   /*
140      Get local grid boundaries
141   */
142   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
143 
144   /*
145      Compute function over the locally owned part of the grid
146   */
147   for (j=ys; j<ys+ym; j++) {
148     y = j*hy;
149     for (i=xs; i<xs+xm; i++) {
150       x = i*hx;
151       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
152       else u[j][i].v = 0.0;
153 
154       u[j][i].u = 1.0 - 2.0*u[j][i].v;
155     }
156   }
157 
158   /*
159      Restore vectors
160   */
161   ierr = DMDAVecRestoreArray(da,U,&u);CHKERRQ(ierr);
162   PetscFunctionReturn(0);
163 }
164 
165 int main(int argc,char **argv)
166 {
167   TS             ts;
168   Vec            U,Udot;
169   Mat            Jac,Jac2;
170   DM             da;
171   AppCtx         appctx;
172   PetscReal      t = 0,shift,norm;
173   PetscErrorCode ierr;
174 
175   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
176 
177   appctx.D1    = 8.0e-5;
178   appctx.D2    = 4.0e-5;
179   appctx.gamma = .024;
180   appctx.kappa = .06;
181 
182   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
183      Create distributed array (DMDA) to manage parallel grid and vectors
184   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
185   ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);CHKERRQ(ierr);
186   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
187   ierr = DMSetUp(da);CHKERRQ(ierr);
188   ierr = DMDASetFieldName(da,0,"u");CHKERRQ(ierr);
189   ierr = DMDASetFieldName(da,1,"v");CHKERRQ(ierr);
190   ierr = DMSetMatType(da,MATAIJ);CHKERRQ(ierr);
191   ierr = DMCreateMatrix(da,&Jac);CHKERRQ(ierr);
192   ierr = DMCreateMatrix(da,&Jac2);CHKERRQ(ierr);
193 
194   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
195      Extract global vectors from DMDA; then duplicate for remaining
196      vectors that are the same types
197    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
198   ierr = DMCreateGlobalVector(da,&U);CHKERRQ(ierr);
199   ierr = VecDuplicate(U,&Udot);CHKERRQ(ierr);
200   ierr = VecSet(Udot,0.0);CHKERRQ(ierr);
201 
202   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
203      Create timestepping solver context
204      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
205   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
206   ierr = TSSetDM(ts,da);CHKERRQ(ierr);
207   ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
208   ierr = TSSetRHSJacobian(ts,Jac,Jac,RHSJacobian,&appctx);CHKERRQ(ierr);
209 
210   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
211      Set initial conditions
212    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
213   ierr = InitialConditions(da,U);CHKERRQ(ierr);
214   ierr = TSSetSolution(ts,U);CHKERRQ(ierr);
215   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
216   ierr = TSSetUp(ts);CHKERRQ(ierr);
217 
218   shift = 2.;
219   ierr  = TSComputeIJacobian(ts,t,U,Udot,shift,Jac2,Jac2,PETSC_FALSE);CHKERRQ(ierr);
220   shift = 1.;
221   ierr  = TSComputeIJacobian(ts,t,U,Udot,shift,Jac,Jac,PETSC_FALSE);CHKERRQ(ierr);
222   shift = 2.;
223   ierr = TSComputeIJacobian(ts,t,U,Udot,shift,Jac,Jac,PETSC_FALSE);CHKERRQ(ierr);
224   ierr = MatAXPY(Jac,-1,Jac2,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
225   ierr = MatNorm(Jac,NORM_INFINITY,&norm);CHKERRQ(ierr);
226   if (norm > 100.0*PETSC_MACHINE_EPSILON) {
227     ierr = PetscPrintf(PETSC_COMM_WORLD,"Error Norm %g \n Incorrect behaviour of TSComputeIJacobian(). The two matrices should have the same results.\n",norm);CHKERRQ(ierr);
228   }
229   ierr = MatDestroy(&Jac);CHKERRQ(ierr);
230   ierr = MatDestroy(&Jac2);CHKERRQ(ierr);
231   ierr = VecDestroy(&U);CHKERRQ(ierr);
232   ierr = VecDestroy(&Udot);CHKERRQ(ierr);
233   ierr = TSDestroy(&ts);CHKERRQ(ierr);
234   ierr = DMDestroy(&da);CHKERRQ(ierr);
235   ierr = PetscFinalize();
236   return(ierr);
237 }
238 
239 /*TEST
240   test:
241 
242 TEST*/
243