Commit 4c47ab78 authored by Damian Alvarez

Add NVHPC for 2020

parent 8cd4f054
##
# Copyright 2015-2019 Bart Oldeman
# Copyright 2016-2020 Forschungszentrum Juelich
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for installing NVIDIA HPC SDK compilers, based on the easyblock for PGI compilers
@author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada)
@author: Damian Alvarez (Forschungszentrum Juelich)
@author: Andreas Herten (Forschungszentrum Juelich)
"""
import fileinput
import os
import re
import stat
import sys
from distutils.version import LooseVersion

import easybuild.tools.environment as env
from easybuild.easyblocks.generic.packedbinary import PackedBinary
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.filetools import adjust_permissions, write_file
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd

# contents for siterc file to make PGI/NVHPC pick up $LIBRARY_PATH
# cfr. https://www.pgroup.com/support/link.htm#lib_path_ldflags
SITERC_LIBRARY_PATH = """
# get the value of the environment variable LIBRARY_PATH
variable LIBRARY_PATH is environment(LIBRARY_PATH);

# split this value at colons, separate by -L, prepend 1st one by -L
variable library_path is
default($if($LIBRARY_PATH,-L$replace($LIBRARY_PATH,":", -L)));

# add the -L arguments to the link line
append LDLIBARGS=$library_path;

# also include the location where libm & co live on Debian-based systems
# cfr. https://github.com/easybuilders/easybuild-easyblocks/pull/919
append LDLIBARGS=-L/usr/lib/x86_64-linux-gnu;
"""


class EB_NVHPC(PackedBinary):
    """
    Support for installing the NVIDIA HPC SDK (NVHPC) compilers
    """

    @staticmethod
    def extra_options():
        extra_vars = {
            'default_cuda_version': ["10.1", "CUDA Version to be used as default (10.2, 11.0, ...)", CUSTOM],
            'compute_capability': ["70", "Compute Capability (70, 80, ...)", CUSTOM],
            'module_byo_compilers': [False, "BYO Compilers: Remove compilers from module", CUSTOM],
            'module_nvhpc_own_mpi': [False, "Add NVHPC's packaged OpenMPI to module", CUSTOM],
            'module_add_math_libs': [False, "Add NVHPC's math libraries to module", CUSTOM],
            'module_add_profilers': [False, "Add NVHPC's NVIDIA Profilers to module", CUSTOM],
            'module_add_nccl': [False, "Add NVHPC's NCCL library to module", CUSTOM],
            'module_add_nvshmem': [False, "Add NVHPC's NVSHMEM library to module", CUSTOM],
        }
        return PackedBinary.extra_options(extra_vars)
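
    # These options are set from an easyconfig, e.g. (illustrative values,
    # cfr. the accompanying easyconfig below):
    #   default_cuda_version = "11.0"
    #   module_add_math_libs = True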

    def __init__(self, *args, **kwargs):
        """Easyblock constructor, define custom class variables specific to NVHPC."""
        super(EB_NVHPC, self).__init__(*args, **kwargs)
        self.nvhpc_install_subdir = os.path.join('Linux_x86_64', self.version)

    def install_step(self):
        """Install by running install command."""
        nvhpc_env_vars = {
            'NVHPC_INSTALL_DIR': self.installdir,
            'NVHPC_SILENT': 'true',
            'NVHPC_DEFAULT_CUDA': str(self.cfg['default_cuda_version']).lower(),  # e.g. 10.2, 11.0
            'NVHPC_STDPAR_CUDACC': str(self.cfg['compute_capability']).lower(),  # e.g. 70, 80
        }
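        # The install command assembled below ends up looking like this (illustrative values):
        #   NVHPC_DEFAULT_CUDA=11.0 NVHPC_INSTALL_DIR=<installdir> NVHPC_SILENT=true NVHPC_STDPAR_CUDACC=70 ./install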
cmd = "%s ./install" % ' '.join(['%s=%s' % x for x in sorted(nvhpc_env_vars.items())])
run_cmd(cmd, log_all=True, simple=True)
# make sure localrc uses GCC in PATH, not always the system GCC, and does not use a system g77 but gfortran
install_abs_subdir = os.path.join(self.installdir, self.nvhpc_install_subdir)
compilers_subdir = os.path.join(install_abs_subdir, "compilers")
makelocalrc_filename = os.path.join(compilers_subdir, "bin", "makelocalrc")
for line in fileinput.input(makelocalrc_filename, inplace='1', backup='.orig'):
line = re.sub(r"^PATH=/", r"#PATH=/", line)
sys.stdout.write(line)
cmd = "%s -x %s -g77 /" % (makelocalrc_filename, compilers_subdir)
run_cmd(cmd, log_all=True, simple=True)
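        # Note (assumption about makelocalrc flags): '-x' makes makelocalrc install the generated
        # localrc into the given directory, and '-g77 /' points the g77 lookup at a non-existent
        # path, so gfortran is picked up instead (cfr. the comment above).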

        # if an OS libnuma is NOT found, makelocalrc creates symbolic links to libpgnuma.so;
        # if we use the EB libnuma, delete those symbolic links to ensure they are not used
        if get_software_root("numactl"):
            for filename in ["libnuma.so", "libnuma.so.1"]:
                path = os.path.join(compilers_subdir, "lib", filename)
                if os.path.islink(path):
                    os.remove(path)

        # install (or update) siterc file to make NVHPC pick up $LIBRARY_PATH
        siterc_path = os.path.join(compilers_subdir, 'bin', 'siterc')
        write_file(siterc_path, SITERC_LIBRARY_PATH, append=True)
        self.log.info("Appended instructions to pick up $LIBRARY_PATH to siterc file at %s: %s",
                      siterc_path, SITERC_LIBRARY_PATH)

        # the CUDA nvvp tar file has broken permissions, so make sure directories are writable by the owner
        adjust_permissions(self.installdir, stat.S_IWUSR, add=True, onlydirs=True)

    def sanity_check_step(self):
        """Custom sanity check for NVHPC"""
        prefix = self.nvhpc_install_subdir
        custom_paths = {
            'files': [os.path.join(prefix, 'compilers', 'bin', x) for x in ['nvc', 'nvc++', 'nvfortran', 'siterc']],
            'dirs': [os.path.join(prefix, 'compilers', 'bin'), os.path.join(prefix, 'compilers', 'lib'),
                     os.path.join(prefix, 'compilers', 'include'), os.path.join(prefix, 'compilers', 'man')],
        }
        super(EB_NVHPC, self).sanity_check_step(custom_paths=custom_paths)

    def _nvhpc_extended_components(self, dirs, basepath, env_vars__folders):
        """
        Extend the dict of environment variable guesses ('dirs') with additional subdirectories:
        for every env_var -> folder entry in 'env_vars__folders', append 'basepath/folder' to 'dirs[env_var]'.
        """
        for env_var, folder in env_vars__folders.items():
            if env_var not in dirs:
                dirs[env_var] = []
            dirs[env_var].append(os.path.join(basepath, folder))
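
    # Example (hypothetical values): _nvhpc_extended_components(dirs, 'Linux_x86_64/20.7/comm_libs/mpi',
    # {'PATH': 'bin'}) appends 'Linux_x86_64/20.7/comm_libs/mpi/bin' to dirs['PATH'].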

    def make_module_req_guess(self):
        """Prefix subdirectories in the NVHPC install dir that are considered for environment variables in the module file."""
        dirs = super(EB_NVHPC, self).make_module_req_guess()
        for key in dirs:
            dirs[key] = [os.path.join(self.nvhpc_install_subdir, 'compilers', d) for d in dirs[key]]
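        # at this point 'dirs' maps environment variables to candidate subdirectories,
        # e.g. (illustrative): {'PATH': ['Linux_x86_64/20.7/compilers/bin'], ...}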

        # $CPATH should not be defined in the module for NVHPC, since it causes problems
        # cfr. https://github.com/easybuilders/easybuild-easyblocks/issues/830
        if 'CPATH' in dirs:
            self.log.info("Removing $CPATH entry: %s", dirs['CPATH'])
            del dirs['CPATH']

        # easyblock options:
        # BYO Compilers allows using NVHPC's libraries and tools with other, external compilers
        if self.cfg['module_byo_compilers']:
            if 'PATH' in dirs:
                del dirs['PATH']
        # NVHPC ships with a pre-compiled OpenMPI installation; enable it by setting the corresponding environment variables
        if self.cfg['module_nvhpc_own_mpi']:
            self.nvhpc_mpi_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "mpi")
            self._nvhpc_extended_components(dirs, self.nvhpc_mpi_basedir,
                                            {'PATH': 'bin', 'CPATH': 'include', 'LD_LIBRARY_PATH': 'lib'})
        # NVHPC ships with math libraries in a dedicated folder; enable them by setting the corresponding environment variables
        if self.cfg['module_add_math_libs']:
            self.nvhpc_math_basedir = os.path.join(self.nvhpc_install_subdir, "math_libs")
            self._nvhpc_extended_components(dirs, self.nvhpc_math_basedir,
                                            {'CPATH': 'include', 'LD_LIBRARY_PATH': 'lib64'})
        # NVHPC ships with NVIDIA's GPU profilers; enable them by setting the corresponding environment variables
        if self.cfg['module_add_profilers']:
            self.nvhpc_profilers_basedir = os.path.join(self.nvhpc_install_subdir, "profilers")
            self._nvhpc_extended_components(dirs, self.nvhpc_profilers_basedir, {'PATH': 'Nsight_Compute'})
            self._nvhpc_extended_components(dirs, self.nvhpc_profilers_basedir, {'PATH': 'Nsight_Systems/bin'})
        # NVHPC ships with NCCL; enable it by setting the corresponding environment variables
        if self.cfg['module_add_nccl']:
            self.nvhpc_nccl_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nccl")
            self._nvhpc_extended_components(dirs, self.nvhpc_nccl_basedir,
                                            {'CPATH': 'include', 'LD_LIBRARY_PATH': 'lib'})
        # NVHPC ships with NVSHMEM; enable it by setting the corresponding environment variables
        if self.cfg['module_add_nvshmem']:
            self.nvhpc_nvshmem_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nvshmem")
            self._nvhpc_extended_components(dirs, self.nvhpc_nvshmem_basedir,
                                            {'CPATH': 'include', 'LD_LIBRARY_PATH': 'lib'})

        return dirs

    def make_module_extra(self):
        """Add environment variable for NVHPC location"""
        txt = super(EB_NVHPC, self).make_module_extra()
        txt += self.module_generator.set_environment('NVHPC', self.installdir)
        return txt
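

# The commit also adds the accompanying easyconfig below, which drives this easyblock
# (presumably named NVHPC-20.7-GCC-9.3.0.eb, following the usual <name>-<version><versionsuffix>.eb scheme).
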
name = 'NVHPC'
version = '20.7'
local_gccver = '9.3.0'
versionsuffix = '-GCC-%s' % local_gccver
homepage = 'https://developer.nvidia.com/hpc-sdk/'
description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)"""
site_contacts = 'd.alvarez@fz-juelich.de'
toolchain = SYSTEM
sources = ['nvhpc_2020_%(version_major)s%(version_minor)s_Linux_x86_64_cuda_multi.tar.gz']
dependencies = [
    ('GCCcore', local_gccver),
    ('binutils', '2.34', '', ('GCCcore', local_gccver)),
    ('CUDA', '11.0', '', SYSTEM),
    # needed to avoid cases where only libnuma.so.1 is present on the system, which makes linking with -lnuma fail
    ('numactl', '2.0.13', '', SYSTEM),
]
default_cuda_version = "11.0"
compute_capability = "70"
# Options to add/remove things to/from environment module
module_byo_compilers = False # Remove compilers from PATH (Bring-your-own compilers)
module_nvhpc_own_mpi = False # Add NVHPC's own pre-compiled OpenMPI
module_add_math_libs = False # Add NVHPC's math libraries (which should be there from CUDA anyway)
module_add_profilers = False # Add NVHPC's NVIDIA Profilers
module_add_nccl = False # Add NVHPC's NCCL library
module_add_nvshmem = False # Add NVHPC's NVSHMEM library
# We use a HMNS, so let's enforce a unique compiler
modluafooter = '''
family("compiler")
add_property("arch", "gpu")
'''
# Always do a recursive unload on compilers
recursive_module_unload = True
# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS)
moduleclass = 'compiler'
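
# Illustrative usage (assuming the easyconfig file name given above): eb NVHPC-20.7-GCC-9.3.0.eb --robot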