xref: /petsc/src/dm/tutorials/ex13f90.F90 (revision 749c190bad46ba447444c173d8c7a4080c70750e)
#include <petsc/finclude/petscdmda.h>
program main
!
! This example intends to show how DMDA is used to solve a PDE on a decomposed
! domain. The equation we are solving is not a PDE, but a toy example: van der
! Pol's 2-variable ODE duplicated onto a 3D grid:
! dx/dt = y
! dy/dt = mu(1-x**2)y - x
!
! So we are solving the same equation on all grid points, with no spatial
! dependencies. Still we tell PETSc to communicate (stencil width >0) so we
! have communication between different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme and make little or no modifications to the DMDA parts. In particular,
! the "inner" parts of the RHS and time-integration do not "know about" the
! decomposed domain.
!
!     See:     http://dx.doi.org/10.6084/m9.figshare.1368581
!
!     Contributed by Aasmund Ervik (asmunder at pvv.org)
!
  use ex13f90auxmodule

  use petscdmda

  implicit none

  PetscErrorCode ierr
  PetscMPIInt rank, size
  MPIU_Comm comm
  Vec Lvec, coords
  DM SolScal, CoordDM
  DMBoundaryType b_x, b_y, b_z
  ! Pointer handed out by DMDAVecGetArray; its first (dof) dimension is 0-based.
  PetscReal, pointer :: array(:, :, :, :)
  PetscReal :: t, tend, dt, xmin, xmax, ymin, ymax, zmin, zmax, xgmin, xgmax, ygmin, ygmax, zgmin, zgmax
  ! Work arrays indexed with our own 1-based convention (plus ghost offsets below).
  PetscReal, allocatable :: f(:, :, :, :), grid(:, :, :, :)
  PetscInt :: i, j, k, igmax, jgmax, kgmax, ib1, ibn, jb1, jbn, kb1, kbn, imax, jmax, kmax, itime, maxstep, nscreen, dof, stw, ndim

  ! Fire up PETSc:
  PetscCallA(PetscInitialize(ierr))

  comm = PETSC_COMM_WORLD
  PetscCallMPIA(MPI_Comm_rank(comm, rank, ierr))
  PetscCallMPIA(MPI_Comm_size(comm, size, ierr))
  if (rank == 0) then
    write (*, *) 'Hi! We are solving van der Pol using ', size, ' processes.'
    write (*, *) ' '
    write (*, *) '  t     x1         x2'
  end if

  ! Set up the global grid
  igmax = 50
  jgmax = 50
  kgmax = 50
  xgmin = 0.0
  ygmin = 0.0
  zgmin = 0.0
  xgmax = 1.0
  ygmax = 1.0
  zgmax = 1.0
  stw = 1 ! stencil width
  dof = 2 ! number of variables in this DA
  ndim = 3 ! 3D code

  ! Get the BCs and create the DMDA
  call get_boundary_cond(b_x, b_y, b_z)
  PetscCallA(DMDACreate3d(comm, b_x, b_y, b_z, DMDA_STENCIL_STAR, igmax, jgmax, kgmax, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, stw, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, PETSC_NULL_INTEGER_ARRAY, SolScal, ierr))
  PetscCallA(DMSetFromOptions(SolScal, ierr))
  PetscCallA(DMSetUp(SolScal, ierr))

  ! Set global coordinates, get a local work vector (with ghost points) that we
  ! own; since it is created (not "gotten") here, it is destroyed with
  ! VecDestroy at the end.
  PetscCallA(DMDASetUniformCoordinates(SolScal, xgmin, xgmax, ygmin, ygmax, zgmin, zgmax, ierr))
  PetscCallA(DMCreateLocalVector(SolScal, Lvec, ierr))

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  ! the first local ghost cell is ib1
  ! the first local       cell is 1
  ! the last  local       cell is imax
  ! the last  local ghost cell is ibn.
  !
  ! i,j,k must be in this call, but are not used
  PetscCallA(DMDAGetCorners(SolScal, i, j, k, imax, jmax, kmax, ierr))
  ib1 = 1 - stw
  jb1 = 1 - stw
  kb1 = 1 - stw
  ibn = imax + stw
  jbn = jmax + stw
  kbn = kmax + stw
  allocate (f(dof, ib1:ibn, jb1:jbn, kb1:kbn))
  allocate (grid(ndim, ib1:ibn, jb1:jbn, kb1:kbn))

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  PetscCallA(DMGetCoordinatesLocal(SolScal, coords, ierr))
  ! We need a new DM for coordinate stuff since PETSc supports unstructured grid
  PetscCallA(DMGetCoordinateDM(SolScal, CoordDM, ierr))
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90auxmodule.F90.
  call petsc_to_local(CoordDM, coords, array, grid, ndim, stw)
  xmin = grid(1, 1, 1, 1)
  ymin = grid(2, 1, 1, 1)
  zmin = grid(3, 1, 1, 1)
  xmax = grid(1, imax, jmax, kmax)
  ymax = grid(2, imax, jmax, kmax)
  zmax = grid(3, imax, jmax, kmax)
  call local_to_petsc(CoordDM, coords, array, grid, ndim, stw)

  ! Note that we never use xmin,xmax in this example, but the preceding way of
  ! getting the local xmin,xmax etc. from PETSc for a structured uniform grid
  ! is not documented in any other examples I could find.

  ! Set up the time-stepping
  t = 0.0
  tend = 100.0
  dt = 1e-3
  maxstep = ceiling((tend - t)/dt)
  ! Write output every second (of simulation-time)
  nscreen = int(1.0/dt) + 1

  ! Set initial condition
  ! NB: the dof index of "array" is 0-based (PETSc convention), while "f" below
  ! uses the 1-based convention of petsc_to_local/local_to_petsc.
  PetscCallA(DMDAVecGetArray(SolScal, Lvec, array, ierr))
  array(0, :, :, :) = 0.5
  array(1, :, :, :) = 0.5
  PetscCallA(DMDAVecRestoreArray(SolScal, Lvec, array, ierr))

  ! Initial set-up finished.
  ! Time loop
  ! Shorten the run for the test harness (see the TEST block at the bottom of
  ! this file); with maxstep = 5 < nscreen no per-step output is produced.
  ! Remove this override to integrate all the way to tend.
  maxstep = 5
  do itime = 1, maxstep

    ! Communicate such that everyone has the correct values in ghost cells
    PetscCallA(DMLocalToLocalBegin(SolScal, Lvec, INSERT_VALUES, Lvec, ierr))
    PetscCallA(DMLocalToLocalEnd(SolScal, Lvec, INSERT_VALUES, Lvec, ierr))

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal, Lvec, array, f, dof, stw)

    ! Do the time step
    call forw_euler(t, dt, ib1, ibn, jb1, jbn, kb1, kbn, imax, jmax, kmax, dof, f, dfdt_vdp)
    t = t + dt

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime, nscreen) == 0) then
      write (*, '(F5.1, 2F11.6)') t, f(1, 1, 1, 1), f(2, 1, 1, 1)
    end if

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal, Lvec, array, f, dof, stw)
  end do

  ! Deallocate and finalize.
  ! Lvec was obtained with DMCreateLocalVector, so it is destroyed with
  ! VecDestroy (DMRestoreLocalVector is the counterpart of DMGetLocalVector).
  PetscCallA(VecDestroy(Lvec, ierr))
  PetscCallA(DMDestroy(SolScal, ierr))
  deallocate (f)
  deallocate (grid)
  PetscCallA(PetscFinalize(ierr))

end program main
!/*TEST
!
!   build:
!     requires: !complex
!     depends:  ex13f90aux.F90
!
!   test:
!     nsize: 4
!
!TEST*/