Commit 79c59e08 authored by Damian Alvarez

Added BullMPI 4.1.4 for GCC 11.3.0

parent d5ff50a0

# ----------------------------------------------------------------------
# Easyconfig 1/3: BullMPI-settings 4.1 (versionsuffix 'CUDA')
# ----------------------------------------------------------------------
easyblock = 'SystemBundle'
name = 'BullMPI-settings'
version = '4.1'
versionsuffix = 'CUDA'
homepage = ''
description = '''This module loads the default BullMPI configuration. It relies on UCX and enables the UCX CUDA
transports.
'''
toolchain = SYSTEM
source_urls = []
sources = []
modextravars = {
    'SLURM_MPI_TYPE': 'pspmix',
    'OMPI_MCA_mca_base_component_show_load_errors': '1',
    'OMPI_MCA_mpi_param_check': '1',
    'OMPI_MCA_mpi_show_handle_leaks': '1',
    'OMPI_MCA_mpi_warn_on_fork': '1',
    # Disable uct for the time being due to:
    # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx
    # Also disable openib, since it is deprecated and superseded by the UCX support in the PML
    'OMPI_MCA_btl': '^uct,openib',
    'OMPI_MCA_btl_openib_allow_ib': '1',
    'OMPI_MCA_bml_r2_show_unreach_errors': '0',
    'OMPI_MCA_pml': 'ucx',
    'OMPI_MCA_osc': '^rdma',
    'OMPI_MCA_opal_abort_print_stack': '1',
    'OMPI_MCA_opal_set_max_sys_limits': '1',
    'OMPI_MCA_opal_event_include': 'epoll',
    'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0',
    # OMPIO does not seem to work reliably on our system, so default to ROMIO
    'OMPI_MCA_io': 'romio321',
}
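# (See the note after this file for how Open MPI consumes the OMPI_MCA_*
# variables above.)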
modluafooter = '''
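-- On load: if a CPU-only UCX-settings flavour is already loaded, swap it
-- for the matching CUDA-enabled one; if none is loaded at all, default
-- to RC-CUDA.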
if mode() == "load" then
    if isloaded("UCX-settings/RC") then
        try_load("UCX-settings/RC-CUDA")
    elseif isloaded("UCX-settings/UD") then
        try_load("UCX-settings/UD-CUDA")
    elseif isloaded("UCX-settings/DC") then
        try_load("UCX-settings/DC-CUDA")
    elseif not isloaded("UCX-settings") then
        try_load("UCX-settings/RC-CUDA")
    end
end
'''
moduleclass = 'system'
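
Note: each OMPI_MCA_<name> variable set in modextravars above is Open MPI's
environment-variable form of the MCA parameter <name>; exporting it has the
same effect as passing "--mca <name> <value>" to mpirun. A minimal sketch
(not part of the commit; it assumes one of these modules is loaded and that
ompi_info is on the PATH) that checks the exported selection:

import os
import subprocess

# OMPI_MCA_pml=ucx forces the UCX point-to-point messaging layer,
# equivalent to `mpirun --mca pml ucx`.
assert os.environ.get('OMPI_MCA_pml') == 'ucx'

# ompi_info lists the components this Open MPI build ships; the UCX PML
# should appear among them for the setting above to take effect.
out = subprocess.run(['ompi_info'], capture_output=True, text=True,
                     check=True).stdout
print('MCA pml: ucx' in out)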

# ----------------------------------------------------------------------
# Easyconfig 2/3: BullMPI-settings 4.1 (versionsuffix 'UCX')
# ----------------------------------------------------------------------
easyblock = 'SystemBundle'
name = 'BullMPI-settings'
version = '4.1'
versionsuffix = 'UCX'
homepage = ''
description = 'This module loads the default BullMPI configuration. It relies on UCX.'
toolchain = SYSTEM
source_urls = []
sources = []
modextravars = {
    'SLURM_MPI_TYPE': 'pspmix',
    'OMPI_MCA_mca_base_component_show_load_errors': '1',
    'OMPI_MCA_mpi_param_check': '1',
    'OMPI_MCA_mpi_show_handle_leaks': '1',
    'OMPI_MCA_mpi_warn_on_fork': '1',
    # Disable uct for the time being due to:
    # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx
    # Also disable openib, since it is deprecated and superseded by the UCX support in the PML
    'OMPI_MCA_btl': '^uct,openib',
    'OMPI_MCA_btl_openib_allow_ib': '1',
    'OMPI_MCA_bml_r2_show_unreach_errors': '0',
    'OMPI_MCA_pml': 'ucx',
    'OMPI_MCA_osc': '^rdma',
    'OMPI_MCA_opal_abort_print_stack': '1',
    'OMPI_MCA_opal_set_max_sys_limits': '1',
    'OMPI_MCA_opal_event_include': 'epoll',
    'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0',
    # OMPIO does not seem to work reliably on our system, so default to ROMIO
    'OMPI_MCA_io': 'romio321',
}
moduleclass = 'system'
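
For both settings files, EasyBuild translates the modextravars dictionary
into setenv statements in the generated Lmod module file. A rough,
hypothetical illustration of that mapping (the real generator additionally
handles quoting, ordering and unload behaviour):

# Sketch only: mimic how modextravars entries become Lua setenv lines.
modextravars = {
    'SLURM_MPI_TYPE': 'pspmix',
    'OMPI_MCA_pml': 'ucx',
}

for key, value in modextravars.items():
    # Lmod's setenv() exports the variable on module load and unsets it
    # again on unload.
    print(f'setenv("{key}", "{value}")')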

# ----------------------------------------------------------------------
# Easyconfig 3/3: BullMPI 4.1.4 (toolchain GCC 11.3.0)
# ----------------------------------------------------------------------
easyblock = 'ConfigureMake'
name = 'BullMPI'
version = '4.1.4'
homepage = 'https://www.open-mpi.org/'
description = """BullMPI is an MPI runtime based on OpenMPI with specific optimizations"""
toolchain = {'name': 'GCC', 'version': '11.3.0'}
toolchainopts = {'pic': True}
sources = [f'{name}_{version}.tar']
checksums = ['afa4514cc203bb5e2646afea6826429f27b983979e4d8764402087e8a3bc3ed9']
osdependencies = [
    # needed for --with-verbs
    ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'),
]
builddependencies = [
    ('pkgconf', '1.8.0'),
    ('Perl', '5.34.1'),
    ('Autotools', '20220317'),
]
dependencies = [
    ('zlib', '1.2.12'),
    ('hwloc', '2.7.1'),
    ('UCX', 'default'),
    ('CUDA', '11.7', '', SYSTEM),
    ('libevent', '2.1.12'),
    ('PMIx', '3.2.3'),  # we rely on this version since it is the newest supported by psmgmt
    ('UCC', 'default'),
]
start_dir = f'openmpi-{version}'
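# The Bull source tarball wraps an SRPM: unpack_options extracts the SRPM
# payload with rpm2cpio/cpio and then untars the bundled Open MPI sources
# (a standalone sketch of these steps follows after this file).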
unpack_options = f'&& rpm2cpio openmpi-bull-gnu-{version}-1.Bull.1.0.src.rpm | cpio -idmv && '
unpack_options += f'tar zxvf openmpi-{version}.tar.gz'
# We need to remove -march=native from CFLAGS, otherwise compilation of the op:avx component fails on hosts
# without AVX-512 support. Enabling AVX-512 here is safe even on non-AVX-512 architectures, since it is used
# in just one file, and the execution of that code path is selected at runtime, depending on the processor ISA.
# preconfigopts = 'CFLAGS="-O2 -ftree-vectorize -fno-math-errno -fPIC" '
# prebuildopts = preconfigopts
# General OpenMPI options
configopts = '--without-orte '
configopts += '--without-psm2 '
configopts += '--disable-oshmem '
configopts += '--with-ime=/opt/ddn/ime '
configopts += '--with-gpfs '
configopts += '--enable-shared '
configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support
configopts += '--with-ucx=$EBROOTUCX '
configopts += '--with-verbs '
configopts += '--with-libevent=$EBROOTLIBEVENT '
configopts += '--with-cuda=$EBROOTCUDA '
# to enable SLURM integration (site-specific)
configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte'
local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"]
sanity_check_paths = {
    'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] +
             ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] +
             ["include/%s.h" % x for x in ["mpi-ext", "mpif-config",
                                           "mpif", "mpi", "mpi_portable_platform"]],
    'dirs': [],
}
moduleclass = 'mpi'
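
The sources/unpack_options pair in the last easyconfig encodes a two-stage
extraction: BullMPI_4.1.4.tar contains an SRPM, and the SRPM payload in turn
contains the Open MPI source tree that is actually built (see start_dir).
A standalone sketch of the roughly equivalent shell steps, assuming
rpm2cpio and cpio are available in the working directory:

import subprocess

version = '4.1.4'

# EasyBuild first untars the downloaded source archive, then runs the
# commands chained in via unpack_options: extract the SRPM payload, then
# unpack the bundled Open MPI tarball.
for cmd in (
    f'tar xf BullMPI_{version}.tar',
    f'rpm2cpio openmpi-bull-gnu-{version}-1.Bull.1.0.src.rpm | cpio -idmv',
    f'tar zxvf openmpi-{version}.tar.gz',
):
    subprocess.run(cmd, shell=True, check=True)
# The build then proceeds from openmpi-4.1.4/, matching start_dir.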