static char help[] = "Test TSComputeIJacobian()\n\n";

#include <petscsys.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

/* Two-component (u,v) field stored at every DMDA grid point */
typedef struct {
  PetscScalar u,v;
} Field;

/* User-defined application context: diffusion coefficients (D1,D2) and
   reaction parameters (gamma,kappa) of the reaction-diffusion model */
typedef struct {
  PetscReal D1,D2,gamma,kappa;
} AppCtx;

/*
   RHSJacobian - Assembles the Jacobian of the RHS of the reaction-diffusion
   system (the commented-out f[j][i].u / f[j][i].v expressions below show the
   RHS being differentiated).

   Input Parameters:
     ts  - the TS context
     t   - current time (unused: the RHS is autonomous)
     U   - global state vector
     ctx - AppCtx with the model parameters

   Output Parameters:
     A  - Jacobian matrix (filled and assembled here)
     BB - preconditioner matrix; not written separately in this test
          (the caller passes the same Mat for both)
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
{
  AppCtx      *appctx = (AppCtx*)ctx; /* user-defined application context */
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,vc;
  Field       **u;
  Vec         localU;
  MatStencil  stencil[6],rowstencil;
  PetscScalar entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  /* Grid spacings on the 2.5 x 2.5 domain; sx,sy are the 1/h^2 factors of
     the 5-point Laplacian stencil in x and y */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* 2D problem: the third (k) stencil index is always 0 */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {

    /* stencil[0..1]: y-neighbors (j-1,j+1); stencil[2..4]: x-neighbors and
       diagonal in row j; stencil[5]: diagonal point, other field component */
    stencil[0].j = j-1;
    stencil[1].j = j+1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; rowstencil.j = j;
    for (i=xs; i<xs+xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

      vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      /* Row for the u-equation (component c = 0): diffusion couplings to the
         four neighbors, diagonal d f_u/d u, and cross term d f_u/d v = -2*u*v */
      stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
      stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
      stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
      stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
      stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
      stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
      rowstencil.i = i; rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));

      /* Row for the v-equation (component c = 1): same stencil locations,
         switched components; cross term d f_v/d u = v^2 */
      stencil[0].c = 1; entries[0] = appctx->D2*sy;
      stencil[1].c = 1; entries[1] = appctx->D2*sy;
      stencil[2].c = 1; entries[2] = appctx->D2*sx;
      stencil[3].c = 1; entries[3] = appctx->D2*sx;
      stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0; entries[5] = vc*vc;
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19*xm*ym));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  /* After assembly, make any insertion at a new nonzero location an error */
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  PetscFunctionReturn(0);
}

/*
   InitialConditions - Sets the initial state: v is a sin^2 bump confined to
   the square [1,1.5] x [1,1.5] (zero elsewhere) and u = 1 - 2*v everywhere.

   Input Parameters:
     da - the DMDA managing the grid
     U  - global vector to fill
*/
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da,U,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      /* perturb v only inside the square [1,1.5] x [1,1.5] */
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da,U,&u));
  PetscFunctionReturn(0);
}

/*
   main - Tests that TSComputeIJacobian() produces the same matrix whether the
   IJacobian (shift*I - J_rhs) is computed directly with shift=2 (into Jac2),
   or recomputed with shift=2 after a prior computation with shift=1 (into
   Jac). The difference of the two results must be zero to rounding.
*/
int main(int argc,char **argv)
{
  TS        ts;
  Vec       U,Udot;
  Mat       Jac,Jac2;
  DM        da;
  AppCtx    appctx;
  PetscReal t = 0,shift,norm;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));

  /* Model parameters for the reaction-diffusion system */
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da,0,"u"));
  PetscCall(DMDASetFieldName(da,1,"v"));
  PetscCall(DMSetMatType(da,MATAIJ));
  PetscCall(DMCreateMatrix(da,&Jac));
  PetscCall(DMCreateMatrix(da,&Jac2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&U));
  PetscCall(VecDuplicate(U,&Udot));
  PetscCall(VecSet(Udot,0.0));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetDM(ts,da));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetRHSJacobian(ts,Jac,Jac,RHSJacobian,&appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da,U));
  PetscCall(TSSetSolution(ts,U));
  PetscCall(TSSetFromOptions(ts));
  PetscCall(TSSetUp(ts));

  /* Reference: IJacobian with shift = 2 computed directly into Jac2 */
  shift = 2.;
  PetscCall(TSComputeIJacobian(ts,t,U,Udot,shift,Jac2,Jac2,PETSC_FALSE));
  /* Compute into Jac with shift = 1, then recompute with shift = 2, to
     exercise the shift-update path inside TSComputeIJacobian() */
  shift = 1.;
  PetscCall(TSComputeIJacobian(ts,t,U,Udot,shift,Jac,Jac,PETSC_FALSE));
  shift = 2.;
  PetscCall(TSComputeIJacobian(ts,t,U,Udot,shift,Jac,Jac,PETSC_FALSE));
  /* Jac and Jac2 must agree: report if || Jac - Jac2 ||_inf exceeds rounding */
  PetscCall(MatAXPY(Jac,-1,Jac2,SAME_NONZERO_PATTERN));
  PetscCall(MatNorm(Jac,NORM_INFINITY,&norm));
  if (norm > 100.0*PETSC_MACHINE_EPSILON) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Error Norm %g \n Incorrect behaviour of TSComputeIJacobian(). The two matrices should have the same results.\n",(double)norm));
  }
  PetscCall(MatDestroy(&Jac));
  PetscCall(MatDestroy(&Jac2));
  PetscCall(VecDestroy(&U));
  PetscCall(VecDestroy(&Udot));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
    test:

TEST*/