xref: /petsc/src/binding/petsc4py/demo/legacy/wrap-swig/Bratu3D.c (revision 970231d20df44f79b27787157e39d441e79f434b)
/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by the
    partial differential equation

            -Laplacian(u) - lambda * exp(u) = 0,  0 < x,y,z < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1, z = 0, z = 1

    A finite difference approximation with the usual 7-point stencil
    is used to discretize the boundary value problem to obtain a
    nonlinear system of equations. The problem is solved in a 3D
    rectangular domain, using distributed arrays (DAs) to partition
    the parallel grid.

  ------------------------------------------------------------------------- */

#include "Bratu3D.h"
FormInitGuess(DM da,Vec X,Params * p)22 PetscErrorCode FormInitGuess(DM da, Vec X, Params *p)
23 {
24   PetscInt       i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
25   PetscReal      lambda,temp1,hx,hy,hz,tempk,tempj;
26   PetscScalar    ***x;
27 
28   PetscFunctionBegin;
29   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
30   lambda = p->lambda_;
31   hx     = 1.0/(PetscReal)(Mx-1);
32   hy     = 1.0/(PetscReal)(My-1);
33   hz     = 1.0/(PetscReal)(Mz-1);
34   temp1  = lambda/(lambda + 1.0);
35 
36   /*
37     Get a pointer to vector data.
38 
39     - For default PETSc vectors, VecGetArray() returns a pointer to
40       the data array.  Otherwise, the routine is implementation
41       dependent.
42 
43     - You MUST call VecRestoreArray() when you no longer need access
44       to the array.
45   */
46   PetscCall(DMDAVecGetArray(da,X,&x));
47 
48   /*
49     Get local grid boundaries (for 3-dimensional DMDA):
50 
51     - xs, ys, zs: starting grid indices (no ghost points)
52 
53     - xm, ym, zm: widths of local grid (no ghost points)
54   */
55   PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));
56 
57   /*
58     Compute initial guess over the locally owned part of the grid
59   */
60   for (k=zs; k<zs+zm; k++) {
61     tempk = (PetscReal)(PetscMin(k,Mz-k-1))*hz;
62     for (j=ys; j<ys+ym; j++) {
63       tempj = PetscMin((PetscReal)(PetscMin(j,My-j-1))*hy,tempk);
64       for (i=xs; i<xs+xm; i++) {
65         if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
66           /* boundary conditions are all zero Dirichlet */
67           x[k][j][i] = 0.0;
68         } else {
69           x[k][j][i] = temp1*sqrt(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,tempj));
70         }
71       }
72     }
73   }
74 
75   /*
76     Restore vector
77   */
78   PetscCall(DMDAVecRestoreArray(da,X,&x));
79   PetscFunctionReturn(PETSC_SUCCESS);
80 }
81 
FormFunction(DM da,Vec X,Vec F,Params * p)82 PetscErrorCode FormFunction(DM da, Vec X, Vec F, Params *p)
83 {
84   PetscInt       i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
85   PetscReal      two = 2.0,lambda,hx,hy,hz,hxhzdhy,hyhzdhx,hxhydhz,sc;
86   PetscScalar    u_north,u_south,u_east,u_west,u_up,u_down,u,u_xx,u_yy,u_zz,***x,***f;
87   Vec            localX;
88 
89   PetscFunctionBegin;
90   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
91   lambda = p->lambda_;
92   hx     = 1.0/(PetscReal)(Mx-1);
93   hy     = 1.0/(PetscReal)(My-1);
94   hz     = 1.0/(PetscReal)(Mz-1);
95   sc     = hx*hy*hz*lambda;
96   hxhzdhy = hx*hz/hy;
97   hyhzdhx = hy*hz/hx;
98   hxhydhz = hx*hy/hz;
99 
100   PetscCall(DMGetLocalVector(da,&localX));
101 
102   /*
103     Scatter ghost points to local vector,using the 2-step process
104     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().  By placing code
105     between these two statements, computations can be done while
106     messages are in transition.
107   */
108   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
109   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
110 
111   /*
112     Get pointers to vector data.
113   */
114   PetscCall(DMDAVecGetArray(da,localX,&x));
115   PetscCall(DMDAVecGetArray(da,F,&f));
116 
117   /*
118     Get local grid boundaries.
119   */
120   PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));
121 
122   /*
123     Compute function over the locally owned part of the grid.
124   */
125   for (k=zs; k<zs+zm; k++) {
126     for (j=ys; j<ys+ym; j++) {
127       for (i=xs; i<xs+xm; i++) {
128         if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
129           /* boundary points */
130           f[k][j][i] = x[k][j][i] - 0.0;
131         } else {
132           /* interior grid points */
133           u           = x[k][j][i];
134           u_east      = x[k][j][i+1];
135           u_west      = x[k][j][i-1];
136           u_north     = x[k][j+1][i];
137           u_south     = x[k][j-1][i];
138           u_up        = x[k+1][j][i];
139           u_down      = x[k-1][j][i];
140           u_xx        = (-u_east + two*u - u_west)*hyhzdhx;
141           u_yy        = (-u_north + two*u - u_south)*hxhzdhy;
142           u_zz        = (-u_up + two*u - u_down)*hxhydhz;
143           f[k][j][i]  = u_xx + u_yy + u_zz - sc*PetscExpScalar(u);
144         }
145       }
146     }
147   }
148 
149   /*
150     Restore vectors.
151   */
152   PetscCall(DMDAVecRestoreArray(da,F,&f));
153   PetscCall(DMDAVecRestoreArray(da,localX,&x));
154   PetscCall(DMRestoreLocalVector(da,&localX));
155   PetscCall(PetscLogFlops(11.0*ym*xm));
156   PetscFunctionReturn(PETSC_SUCCESS);
157 }
158 
FormJacobian(DM da,Vec X,Mat J,Params * p)159 PetscErrorCode FormJacobian(DM da, Vec X, Mat J, Params *p)
160 {
161   PetscInt       i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
162   PetscReal      lambda,hx,hy,hz,hxhzdhy,hyhzdhx,hxhydhz,sc;
163   PetscScalar    v[7],***x;
164   MatStencil     col[7],row;
165   Vec            localX;
166 
167   PetscFunctionBegin;
168   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
169   lambda = p->lambda_;
170   hx     = 1.0/(PetscReal)(Mx-1);
171   hy     = 1.0/(PetscReal)(My-1);
172   hz     = 1.0/(PetscReal)(Mz-1);
173   sc     = hx*hy*hz*lambda;
174   hxhzdhy = hx*hz/hy;
175   hyhzdhx = hy*hz/hx;
176   hxhydhz = hx*hy/hz;
177 
178   PetscCall(DMGetLocalVector(da,&localX));
179 
180   /*
181     Scatter ghost points to local vector, using the 2-step process
182     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().  By placing code
183     between these two statements, computations can be done while
184     messages are in transition.
185   */
186   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
187   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
188 
189   /*
190     Get pointer to vector data.
191   */
192   PetscCall(DMDAVecGetArray(da,localX,&x));
193 
194   /*
195     Get local grid boundaries.
196   */
197   PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));
198 
199   /*
200     Compute entries for the locally owned part of the Jacobian.
201 
202     - Currently, all PETSc parallel matrix formats are partitioned by
203       contiguous chunks of rows across the processors.
204 
205     - Each processor needs to insert only elements that it owns
206       locally (but any non-local elements will be sent to the
207       appropriate processor during matrix assembly).
208 
209     - Here, we set all entries for a particular row at once.
210 
211     - We can set matrix entries either using either
212       MatSetValuesLocal() or MatSetValues(), as discussed above.
213   */
214   for (k=zs; k<zs+zm; k++) {
215     for (j=ys; j<ys+ym; j++) {
216       for (i=xs; i<xs+xm; i++) {
217         row.k = k; row.j = j; row.i = i;
218         /* boundary points */
219         if (i == 0 || j == 0 || k == 0|| i == Mx-1 || j == My-1 || k == Mz-1) {
220           v[0] = 1.0;
221           PetscCall(MatSetValuesStencil(J,1,&row,1,&row,v,INSERT_VALUES));
222         } else {
223         /* interior grid points */
224           v[0] = -hxhydhz; col[0].k=k-1;col[0].j=j;  col[0].i = i;
225           v[1] = -hxhzdhy; col[1].k=k;  col[1].j=j-1;col[1].i = i;
226           v[2] = -hyhzdhx; col[2].k=k;  col[2].j=j;  col[2].i = i-1;
227           v[3] = 2.0*(hyhzdhx+hxhzdhy+hxhydhz)-sc*PetscExpScalar(x[k][j][i]);col[3].k=row.k;col[3].j=row.j;col[3].i = row.i;
228           v[4] = -hyhzdhx; col[4].k=k;  col[4].j=j;  col[4].i = i+1;
229           v[5] = -hxhzdhy; col[5].k=k;  col[5].j=j+1;col[5].i = i;
230           v[6] = -hxhydhz; col[6].k=k+1;col[6].j=j;  col[6].i = i;
231           PetscCall(MatSetValuesStencil(J,1,&row,7,col,v,INSERT_VALUES));
232         }
233       }
234     }
235   }
236   PetscCall(DMDAVecRestoreArray(da,localX,&x));
237   PetscCall(DMRestoreLocalVector(da,&localX));
238 
239   /*
240     Assemble matrix, using the 2-step process: MatAssemblyBegin(),
241     MatAssemblyEnd().
242   */
243   PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY));
244   PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY));
245 
246   /*
247     Tell the matrix we will never add a new nonzero location to the
248     matrix. If we do, it will generate an error.
249   */
250   PetscCall(MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
251   PetscFunctionReturn(PETSC_SUCCESS);
252 }
253