xref: /petsc/src/ts/tests/ex24.c (revision a4af0ceea8a251db97ee0dc5c0d52d4adf50264a)
1 static char help[] = "Test TSComputeIJacobian()\n\n";
2 
3 #include <petscsys.h>
4 #include <petscdm.h>
5 #include <petscdmda.h>
6 #include <petscts.h>
7 
/* State at one grid node: the two coupled species concentrations. */
typedef struct {
  PetscScalar u,v;
} Field;
11 
/* User-supplied model coefficients passed to the Jacobian routine.
   D1, D2 are the diffusion coefficients of u and v; gamma and kappa are the
   reaction-rate parameters (presumably a Gray-Scott style model, judging by
   the residual kept in comments inside RHSJacobian — confirm against the
   companion example). */
typedef struct {
  PetscReal D1,D2,gamma,kappa;
} AppCtx;
15 
16 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
17 {
18   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
19   DM             da;
20   PetscErrorCode ierr;
21   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
22   PetscReal      hx,hy,sx,sy;
23   PetscScalar    uc,vc;
24   Field          **u;
25   Vec            localU;
26   MatStencil     stencil[6],rowstencil;
27   PetscScalar    entries[6];
28 
29   PetscFunctionBegin;
30   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
31   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
32   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
33 
34   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
35   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
36 
37   /*
38      Scatter ghost points to local vector,using the 2-step process
39         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
40      By placing code between these two statements, computations can be
41      done while messages are in transition.
42   */
43   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
44   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
45 
46   /*
47      Get pointers to vector data
48   */
49   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
50 
51   /*
52      Get local grid boundaries
53   */
54   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
55 
56   stencil[0].k = 0;
57   stencil[1].k = 0;
58   stencil[2].k = 0;
59   stencil[3].k = 0;
60   stencil[4].k = 0;
61   stencil[5].k = 0;
62   rowstencil.k = 0;
63   /*
64      Compute function over the locally owned part of the grid
65   */
66   for (j=ys; j<ys+ym; j++) {
67 
68     stencil[0].j = j-1;
69     stencil[1].j = j+1;
70     stencil[2].j = j;
71     stencil[3].j = j;
72     stencil[4].j = j;
73     stencil[5].j = j;
74     rowstencil.k = 0; rowstencil.j = j;
75     for (i=xs; i<xs+xm; i++) {
76       uc = u[j][i].u;
77       vc = u[j][i].v;
78 
79       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
80       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
81 
82       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
83       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
84        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
85 
86       stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
87       stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
88       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
89       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
90       stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
91       stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
92       rowstencil.i = i; rowstencil.c = 0;
93 
94       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
95       stencil[0].c = 1; entries[0] = appctx->D2*sy;
96       stencil[1].c = 1; entries[1] = appctx->D2*sy;
97       stencil[2].c = 1; entries[2] = appctx->D2*sx;
98       stencil[3].c = 1; entries[3] = appctx->D2*sx;
99       stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
100       stencil[5].c = 0; entries[5] = vc*vc;
101       rowstencil.c = 1;
102 
103       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
104       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
105     }
106   }
107 
108   /*
109      Restore vectors
110   */
111   ierr = PetscLogFlops(19*xm*ym);CHKERRQ(ierr);
112   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
113   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
114   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
115   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
116   ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 PetscErrorCode InitialConditions(DM da,Vec U)
121 {
122   PetscErrorCode ierr;
123   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
124   Field          **u;
125   PetscReal      hx,hy,x,y;
126 
127   PetscFunctionBegin;
128   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
129 
130   hx = 2.5/(PetscReal)Mx;
131   hy = 2.5/(PetscReal)My;
132 
133   /*
134      Get pointers to vector data
135   */
136   ierr = DMDAVecGetArray(da,U,&u);CHKERRQ(ierr);
137 
138   /*
139      Get local grid boundaries
140   */
141   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
142 
143   /*
144      Compute function over the locally owned part of the grid
145   */
146   for (j=ys; j<ys+ym; j++) {
147     y = j*hy;
148     for (i=xs; i<xs+xm; i++) {
149       x = i*hx;
150       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
151       else u[j][i].v = 0.0;
152 
153       u[j][i].u = 1.0 - 2.0*u[j][i].v;
154     }
155   }
156 
157   /*
158      Restore vectors
159   */
160   ierr = DMDAVecRestoreArray(da,U,&u);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
164 int main(int argc,char **argv)
165 {
166   TS             ts;
167   Vec            U,Udot;
168   Mat            Jac,Jac2;
169   DM             da;
170   AppCtx         appctx;
171   PetscReal      t = 0,shift,norm;
172   PetscErrorCode ierr;
173 
174   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
175 
176   appctx.D1    = 8.0e-5;
177   appctx.D2    = 4.0e-5;
178   appctx.gamma = .024;
179   appctx.kappa = .06;
180 
181   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
182      Create distributed array (DMDA) to manage parallel grid and vectors
183   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
184   ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);CHKERRQ(ierr);
185   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
186   ierr = DMSetUp(da);CHKERRQ(ierr);
187   ierr = DMDASetFieldName(da,0,"u");CHKERRQ(ierr);
188   ierr = DMDASetFieldName(da,1,"v");CHKERRQ(ierr);
189   ierr = DMSetMatType(da,MATAIJ);CHKERRQ(ierr);
190   ierr = DMCreateMatrix(da,&Jac);CHKERRQ(ierr);
191   ierr = DMCreateMatrix(da,&Jac2);CHKERRQ(ierr);
192 
193   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
194      Extract global vectors from DMDA; then duplicate for remaining
195      vectors that are the same types
196    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
197   ierr = DMCreateGlobalVector(da,&U);CHKERRQ(ierr);
198   ierr = VecDuplicate(U,&Udot);CHKERRQ(ierr);
199   ierr = VecSet(Udot,0.0);CHKERRQ(ierr);
200 
201   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
202      Create timestepping solver context
203      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
204   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
205   ierr = TSSetDM(ts,da);CHKERRQ(ierr);
206   ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
207   ierr = TSSetRHSJacobian(ts,Jac,Jac,RHSJacobian,&appctx);CHKERRQ(ierr);
208 
209   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
210      Set initial conditions
211    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
212   ierr = InitialConditions(da,U);CHKERRQ(ierr);
213   ierr = TSSetSolution(ts,U);CHKERRQ(ierr);
214   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
215   ierr = TSSetUp(ts);CHKERRQ(ierr);
216 
217   shift = 2.;
218   ierr  = TSComputeIJacobian(ts,t,U,Udot,shift,Jac2,Jac2,PETSC_FALSE);CHKERRQ(ierr);
219   shift = 1.;
220   ierr  = TSComputeIJacobian(ts,t,U,Udot,shift,Jac,Jac,PETSC_FALSE);CHKERRQ(ierr);
221   shift = 2.;
222   ierr = TSComputeIJacobian(ts,t,U,Udot,shift,Jac,Jac,PETSC_FALSE);CHKERRQ(ierr);
223   ierr = MatAXPY(Jac,-1,Jac2,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
224   ierr = MatNorm(Jac,NORM_INFINITY,&norm);CHKERRQ(ierr);
225   if (norm > 100.0*PETSC_MACHINE_EPSILON) {
226     ierr = PetscPrintf(PETSC_COMM_WORLD,"Error Norm %g \n Incorrect behaviour of TSComputeIJacobian(). The two matrices should have the same results.\n",norm);CHKERRQ(ierr);
227   }
228   ierr = MatDestroy(&Jac);CHKERRQ(ierr);
229   ierr = MatDestroy(&Jac2);CHKERRQ(ierr);
230   ierr = VecDestroy(&U);CHKERRQ(ierr);
231   ierr = VecDestroy(&Udot);CHKERRQ(ierr);
232   ierr = TSDestroy(&ts);CHKERRQ(ierr);
233   ierr = DMDestroy(&da);CHKERRQ(ierr);
234   ierr = PetscFinalize();
235   return(ierr);
236 }
237 
238 /*TEST
239   test:
240 
241 TEST*/
242