program main
!
! This example shows how DMDA is used to solve a PDE on a decomposed
! domain. The equation we are solving is not really a PDE, but a toy example:
! van der Pol's 2-variable ODE duplicated onto a 3D grid:
! dx/dt = y
! dy/dt = mu(1-x**2)y - x
!
! So we are solving the same equation on all grid points, with no spatial
! dependencies. Still, we tell PETSc to communicate (stencil width > 0) so
! that ghost values are exchanged between the different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme, with little or no modification to the DMDA parts. In particular,
! the "inner" parts of the RHS and time-integration do not "know about" the
! decomposed domain.
!
!     See:     http://dx.doi.org/10.6084/m9.figshare.1368581
!
!     Contributed by Aasmund Ervik (asmunder at pvv.org)
!
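! For reference: at each grid point, the RHS evaluated by dfdt_vdp (defined
! in ex13f90aux.F90) should amount to something like
!   dfdt(1) = f(2)
!   dfdt(2) = mu*(1.0 - f(1)**2)*f(2) - f(1)
! where component 1 holds x and component 2 holds y; see the auxiliary module
! for the actual implementation and the value of mu.
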
  use ex13f90auxmodule

#include <petsc/finclude/petscdmda.h>
  use petscdmda

  implicit none

  PetscErrorCode   ierr
  PetscMPIInt      rank,size
  MPI_Comm         comm
  Vec              Lvec,coords
  DM               SolScal,CoordDM
  DMBoundaryType b_x,b_y,b_z
  PetscReal, pointer :: array(:,:,:,:)
  PetscReal :: t,tend,dt,xmin,xmax,ymin,ymax,zmin,zmax,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax
  PetscReal, allocatable :: f(:,:,:,:), grid(:,:,:,:)
  PetscInt :: i,j,k,igmax,jgmax,kgmax,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,itime,maxstep,nscreen,dof,stw,ndim

  ! Fire up PETSc:
  PetscCallA(PetscInitialize(ierr))

  comm = PETSC_COMM_WORLD
  PetscCallMPIA(MPI_Comm_rank(comm,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(comm,size,ierr))
  if (rank == 0) then
    write(*,*) 'Hi! We are solving van der Pol using ',size,' processes.'
    write(*,*) ' '
    write(*,*) '  t     x1         x2'
  endif

  ! Set up the global grid
  igmax = 50
  jgmax = 50
  kgmax = 50
  xgmin = 0.0
  ygmin = 0.0
  zgmin = 0.0
  xgmax = 1.0
  ygmax = 1.0
  zgmax = 1.0
  stw = 1 ! stencil width
  dof = 2 ! number of variables in this DA
  ndim = 3 ! 3D code

  ! Get the BCs and create the DMDA
  call get_boundary_cond(b_x,b_y,b_z)
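  ! In the call below, the three PETSC_DECIDE arguments let PETSc choose how
  ! many processes to use in each direction, and the PETSC_NULL_INTEGER_ARRAY
  ! arguments mean we do not prescribe how many grid points each process gets
  ! in each direction.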
  PetscCallA(DMDACreate3d(comm,b_x,b_y,b_z,DMDA_STENCIL_STAR,igmax,jgmax,kgmax,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,stw,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,PETSC_NULL_INTEGER_ARRAY,SolScal,ierr))
  PetscCallA(DMSetFromOptions(SolScal,ierr))
  PetscCallA(DMSetUp(SolScal,ierr))

  ! Set global coordinates and get a local work vector
  PetscCallA(DMDASetUniformCoordinates(SolScal,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax,ierr))
  PetscCallA(DMCreateLocalVector(SolScal,Lvec,ierr))

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  ! the first local ghost cell is ib1
  ! the first local       cell is 1
  ! the last  local       cell is imax
  ! the last  local ghost cell is ibn.
  !
  ! i,j,k receive the global indices of this process's corner from the call
  ! below; they must be passed, but we do not use them here.
  PetscCallA(DMDAGetCorners(SolScal,i,j,k,imax,jmax,kmax,ierr))
  ib1=1-stw
  jb1=1-stw
  kb1=1-stw
  ibn=imax+stw
  jbn=jmax+stw
  kbn=kmax+stw
  allocate(f(dof,ib1:ibn,jb1:jbn,kb1:kbn))
  allocate(grid(ndim,ib1:ibn,jb1:jbn,kb1:kbn))
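  ! With stw = 1 this gives, in the i-direction, the ghosted index range
  ! ib1:ibn = 0:imax+1, of which 1:imax are the cells owned by this process
  ! (and similarly for j and k).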

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  PetscCallA(DMGetCoordinatesLocal(SolScal,coords,ierr))
  ! We need a new DM for coordinate stuff since PETSc also supports unstructured grids
  PetscCallA(DMGetCoordinateDM(SolScal,CoordDM,ierr))
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90auxmodule.F90.
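  ! (Presumably they wrap DMDAVecGetArray/DMDAVecRestoreArray and copy between
  ! the PETSc-indexed array and the locally 1-based f/grid arrays; see that
  ! module for the actual implementation.)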
  call petsc_to_local(CoordDM,coords,array,grid,ndim,stw)
  xmin=grid(1,1,1,1)
  ymin=grid(2,1,1,1)
  zmin=grid(3,1,1,1)
  xmax=grid(1,imax,jmax,kmax)
  ymax=grid(2,imax,jmax,kmax)
  zmax=grid(3,imax,jmax,kmax)
  call local_to_petsc(CoordDM,coords,array,grid,ndim,stw)

  ! Note that we never use xmin,xmax in this example, but the preceding way of
  ! getting the local xmin,xmax etc. from PETSc for a structured uniform grid
  ! is not documented in any other examples I could find.

  ! Set up the time-stepping
  t = 0.0
  tend = 100.0
  dt = 1e-3
  maxstep=ceiling((tend-t)/dt)
  ! Write output every second (of simulation-time)
  nscreen = int(1.0/dt)+1
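  ! With dt = 1e-3 this makes nscreen roughly 1000, i.e. output is written
  ! about once per simulated second.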

  ! Set initial condition
  PetscCallA(DMDAVecGetArray(SolScal,Lvec,array,ierr))
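  ! Note that the component (dof) index of the array returned by
  ! DMDAVecGetArray starts at 0, hence components 0 and 1 below.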
  array(0,:,:,:) = 0.5
  array(1,:,:,:) = 0.5
  PetscCallA(DMDAVecRestoreArray(SolScal,Lvec,array,ierr))

  ! Initial set-up finished.
  ! Time loop
  ! Note that this overrides the maxstep computed above, so only a few steps
  ! are actually taken (this keeps the example's runtime short).
  maxstep = 5
  do itime=1,maxstep

    ! Communicate such that everyone has the correct values in ghost cells
    PetscCallA(DMLocalToLocalBegin(SolScal,Lvec,INSERT_VALUES,Lvec,ierr))
    PetscCallA(DMLocalToLocalEnd(SolScal,Lvec,INSERT_VALUES,Lvec,ierr))

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal,Lvec,array,f,dof,stw)

    ! Do the time step
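    ! Conceptually, forw_euler (see ex13f90aux.F90) should perform something
    ! like f = f + dt*dfdt_vdp(t,f) on the interior cells 1:imax, 1:jmax,
    ! 1:kmax; the ghost cells were filled by the communication above.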
    call forw_euler(t,dt,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,dof,f,dfdt_vdp)
    t=t+dt

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime,nscreen) == 0) then
      write(*,101) t,f(1,1,1,1),f(2,1,1,1)
    endif

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal,Lvec,array,f,dof,stw)
  end do

  ! Deallocate and finalize
  ! Lvec was created with DMCreateLocalVector, so destroy it explicitly
  PetscCallA(VecDestroy(Lvec,ierr))
  PetscCallA(DMDestroy(SolScal,ierr))
  deallocate(f)
  deallocate(grid)
  PetscCallA(PetscFinalize(ierr))

  ! Format for writing output to screen
101 format(F5.1,2F11.6)

end program main

!/*TEST
!
!   build:
!     requires: !complex
!     depends:  ex13f90aux.F90
!
!   test:
!     nsize: 4
!
!TEST*/