/* ------------------------------------------------------------------------

   Solid Fuel Ignition (SFI) problem.  This problem is modeled by the
   partial differential equation

           -Laplacian(u) - lambda * exp(u) = 0,  0 < x,y,z < 1,

   with boundary conditions

            u = 0  for  x = 0, x = 1, y = 0, y = 1, z = 0, z = 1.

   A finite difference approximation with the usual 7-point stencil
   is used to discretize the boundary value problem to obtain a
   nonlinear system of equations.  The problem is solved in a 3D
   rectangular domain, using distributed arrays (DAs) to partition
   the parallel grid.

  ------------------------------------------------------------------------- */

#include "Bratu3Dimpl.h"

PetscErrorCode FormInitGuess(DM da, Vec X, Params *p)
{
  PetscInt    i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
  PetscReal   lambda,temp1,hx,hy,hz,tempk,tempj;
  PetscScalar ***x;

  PetscFunctionBegin;

  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda = p->lambda_;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  hz     = 1.0/(PetscReal)(Mz-1);
  temp1  = lambda/(lambda + 1.0);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation
         dependent.
       - You MUST call VecRestoreArray() when you no longer need access
         to the array.
  */
  PetscCall(DMDAVecGetArray(da,X,&x));

  /*
     Get local grid boundaries (for 3-dimensional DMDA):
       - xs, ys, zs: starting grid indices (no ghost points)
       - xm, ym, zm: widths of local grid (no ghost points)
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));

  /*
     Compute initial guess over the locally owned part of the grid:
     interior points get a lambda-scaled distance-to-boundary profile.
  */
  for (k=zs; k<zs+zm; k++) {
    tempk = (PetscReal)(PetscMin(k,Mz-k-1))*hz;
    for (j=ys; j<ys+ym; j++) {
      tempj = PetscMin((PetscReal)(PetscMin(j,My-j-1))*hy,tempk);
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
          /* boundary conditions are all zero Dirichlet */
          x[k][j][i] = 0.0;
        } else {
          x[k][j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,tempj));
        }
      }
    }
  }

  /*
     Restore vector.
  */
  PetscCall(DMDAVecRestoreArray(da,X,&x));

  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode FormFunction(DM da, Vec X, Vec F, Params *p)
{
  PetscInt    i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
  PetscReal   two = 2.0,lambda,hx,hy,hz,hxhzdhy,hyhzdhx,hxhydhz,sc;
  PetscScalar u,u_east,u_west,u_north,u_south,u_up,u_down,u_xx,u_yy,u_zz,***x,***f;
  Vec         localX;

  PetscFunctionBegin;

  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda  = p->lambda_;
  hx      = 1.0/(PetscReal)(Mx-1);
  hy      = 1.0/(PetscReal)(My-1);
  hz      = 1.0/(PetscReal)(Mz-1);
  sc      = hx*hy*hz*lambda;
  hxhzdhy = hx*hz/hy;
  hyhzdhx = hy*hz/hx;
  hxhydhz = hx*hy/hz;

  PetscCall(DMGetLocalVector(da,&localX));

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().  By placing code
     between these two statements, computations can be done while
     messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /*
     Get pointers to vector data.
  */
  PetscCall(DMDAVecGetArray(da,localX,&x));
  PetscCall(DMDAVecGetArray(da,F,&f));

  /*
     Get local grid boundaries.
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));

  /*
     Compute function over the locally owned part of the grid.  At each
     interior point the 7-point stencil yields the scaled residual

       f = (2u - uE - uW)*hy*hz/hx + (2u - uN - uS)*hx*hz/hy
         + (2u - uU - uD)*hx*hy/hz - lambda*hx*hy*hz*exp(u);

     boundary points carry the Dirichlet condition f = u.
  */
  for (k=zs; k<zs+zm; k++) {
    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
          f[k][j][i] = x[k][j][i];
        } else {
          u          = x[k][j][i];
          u_east     = x[k][j][i+1];
          u_west     = x[k][j][i-1];
          u_north    = x[k][j+1][i];
          u_south    = x[k][j-1][i];
          u_up       = x[k+1][j][i];
          u_down     = x[k-1][j][i];
          u_xx       = (-u_east  + two*u - u_west )*hyhzdhx;
          u_yy       = (-u_north + two*u - u_south)*hxhzdhy;
          u_zz       = (-u_up    + two*u - u_down )*hxhydhz;
          f[k][j][i] = u_xx + u_yy + u_zz - sc*PetscExpScalar(u);
        }
      }
    }
  }

  /*
     Restore vectors.
  */
  PetscCall(DMDAVecRestoreArray(da,localX,&x));
  PetscCall(DMDAVecRestoreArray(da,F,&f));
  PetscCall(DMRestoreLocalVector(da,&localX));

  PetscFunctionReturn(PETSC_SUCCESS);
}
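#include <petscsnes.h>

/*
   NOTE: The wrapper below is not part of the original example.  It is a
   minimal sketch, assuming the routines in this file are driven by a
   PETSc SNES nonlinear solver: it adapts FormFunction() to the callback
   signature expected by SNESSetFunction(), recovering the DM from the
   SNES.  The name FormFunction_SNES and the guard macro
   BRATU3D_STANDALONE are hypothetical.
*/
#ifdef BRATU3D_STANDALONE
static PetscErrorCode FormFunction_SNES(SNES snes, Vec X, Vec F, void *ctx)
{
  DM da;

  PetscFunctionBegin;
  PetscCall(SNESGetDM(snes,&da));               /* DM attached via SNESSetDM() */
  PetscCall(FormFunction(da,X,F,(Params*)ctx)); /* delegate to the routine above */
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif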
PetscErrorCode FormJacobian(DM da, Vec X, Mat J, Params *p)
{
  PetscInt    i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
  PetscReal   lambda,hx,hy,hz,hxhzdhy,hyhzdhx,hxhydhz,sc;
  PetscScalar v[7],***x;
  MatStencil  row,col[7];
  Vec         localX;

  PetscFunctionBegin;

  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
                        PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  lambda  = p->lambda_;
  hx      = 1.0/(PetscReal)(Mx-1);
  hy      = 1.0/(PetscReal)(My-1);
  hz      = 1.0/(PetscReal)(Mz-1);
  sc      = hx*hy*hz*lambda;
  hxhzdhy = hx*hz/hy;
  hyhzdhx = hy*hz/hx;
  hxhydhz = hx*hy/hz;

  PetscCall(DMGetLocalVector(da,&localX));

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().  By placing code
     between these two statements, computations can be done while
     messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /*
     Get pointer to vector data.
  */
  PetscCall(DMDAVecGetArray(da,localX,&x));

  /*
     Get local grid boundaries.
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - Entries are set with MatSetValuesStencil(), which maps grid
        indices to matrix indices; MatSetValuesLocal() or MatSetValues()
        could be used instead.
  */
  for (k=zs; k<zs+zm; k++) {
    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        row.k = k; row.j = j; row.i = i;
        if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
          /* boundary points: identity row */
          v[0] = 1.0;
          PetscCall(MatSetValuesStencil(J,1,&row,1,&row,v,INSERT_VALUES));
        } else {
          /* interior grid points: 7-point stencil */
          v[0] = -hxhydhz; col[0].k = k-1; col[0].j = j;   col[0].i = i;
          v[1] = -hxhzdhy; col[1].k = k;   col[1].j = j-1; col[1].i = i;
          v[2] = -hyhzdhx; col[2].k = k;   col[2].j = j;   col[2].i = i-1;
          v[3] = 2.0*(hyhzdhx + hxhzdhy + hxhydhz) - sc*PetscExpScalar(x[k][j][i]);
                           col[3].k = k;   col[3].j = j;   col[3].i = i;
          v[4] = -hyhzdhx; col[4].k = k;   col[4].j = j;   col[4].i = i+1;
          v[5] = -hxhzdhy; col[5].k = k;   col[5].j = j+1; col[5].i = i;
          v[6] = -hxhydhz; col[6].k = k+1; col[6].j = j;   col[6].i = i;
          PetscCall(MatSetValuesStencil(J,1,&row,7,col,v,INSERT_VALUES));
        }
      }
    }
  }

  PetscCall(DMDAVecRestoreArray(da,localX,&x));
  PetscCall(DMRestoreLocalVector(da,&localX));

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY));

  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
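/*
   The driver below is likewise not part of the original example; it is an
   illustrative sketch of how the three routines above could be exercised
   with SNES on a 17x17x17 grid.  FormJacobian_SNES mirrors the function
   wrapper, and all names introduced here (FormJacobian_SNES,
   BRATU3D_STANDALONE, the sample lambda value, and the assumption that
   Params has only the lambda_ member to set) are assumptions.
*/
#ifdef BRATU3D_STANDALONE
static PetscErrorCode FormJacobian_SNES(SNES snes, Vec X, Mat J, Mat P, void *ctx)
{
  DM da;

  PetscFunctionBegin;
  PetscCall(SNESGetDM(snes,&da));
  PetscCall(FormJacobian(da,X,P,(Params*)ctx)); /* assemble the preconditioning matrix */
  if (J != P) {                                 /* e.g. when running with -snes_mf_operator */
    PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  SNES   snes;
  DM     da;
  Vec    x,r;
  Mat    J;
  Params params;

  PetscCall(PetscInitialize(&argc,&argv,NULL,NULL));
  params.lambda_ = 6.0;  /* sample value; must stay below the critical lambda */

  PetscCall(DMDACreate3d(PETSC_COMM_WORLD,
                         DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,
                         DMDA_STENCIL_STAR,17,17,17,
                         PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                         1,1,NULL,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMCreateGlobalVector(da,&x));
  PetscCall(VecDuplicate(x,&r));
  PetscCall(DMCreateMatrix(da,&J));

  PetscCall(SNESCreate(PETSC_COMM_WORLD,&snes));
  PetscCall(SNESSetDM(snes,da));
  PetscCall(SNESSetFunction(snes,r,FormFunction_SNES,&params));
  PetscCall(SNESSetJacobian(snes,J,J,FormJacobian_SNES,&params));
  PetscCall(SNESSetFromOptions(snes));

  PetscCall(FormInitGuess(da,x,&params));
  PetscCall(SNESSolve(snes,NULL,x));

  PetscCall(MatDestroy(&J));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&x));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
#endif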