# xref: /petsc/config/examples/arch-olcf-spock.py (revision ccb4e88a40f0b86eaeca07ff64c64e4de2fae686)
#!/usr/bin/python3

#As suggested by OLCF staff this is my rc file
#
#module load craype-accel-amd-gfx908
#module load PrgEnv-cray
#module load rocm
#export PE_MPICH_GTL_DIR_amd_gfx908="-L/opt/cray/pe/mpich/8.1.4/gtl/lib"
#export PE_MPICH_GTL_LIBS_amd_gfx908="-lmpi_gtl_hsa"
#export MPIR_CVAR_GPU_EAGER_DEVICE_MEM=0
#export MPICH_GPU_SUPPORT_ENABLED=1
#export MPICH_SMP_SINGLE_COPY_MODE=CMA
#

if __name__ == '__main__':
  import sys
  import os
  # Make PETSc's configure machinery importable when run from the PETSc root.
  sys.path.insert(0, os.path.abspath('config'))
  import configure
  configure_options = [
    # When we compile HIP code in PETSc, we eventually include mpi.h.
    # MPI include folder is hidden by cc/CC and PETSc does not detect it.
    # NOTE: use os.path.join so the separator is inserted — plain string
    # concatenation produced ".../8.1.4include" (a nonexistent directory).
    '--HIPPPFLAGS=-I'+os.path.join(os.environ['MPICH_DIR'],'include'),
    # Needed by MPICH:
    # ld.lld: error: /opt/cray/pe/mpich/8.1.4/gtl/lib/libmpi_gtl_hsa.so: undefined reference to hsa_amd_memory_pool_allocate
    # and many others
    '--LDFLAGS=-L'+os.path.join(os.environ['ROCM_PATH'],'lib')+' -lhsa-runtime64',
    '--PETSC_ARCH=arch-spock-debug',
    '--download-magma=1',
    '--with-64-bit-indices=0',
    '--with-cc=cc',
    '--with-cxx=CC',
    '--with-debugging=1',
    '--with-fc=ftn',
    '--with-fortran-bindings=0',
    '--with-hip=1',
    '--with-hipc=hipcc',
    '--with-magma-fortran-bindings=0',
    '--with-magma-gputarget=gfx908',
    # Batch-system launcher for the test harness (Spock/ECP allocation).
    '--with-mpiexec=srun -p ecp -N 1 -A csc314 -t 00:10:00',
    '--with-precision=double',
    '--with-scalar-type=real',
  ]
  configure.petsc_configure(configure_options)
45