Commit d839965d authored by Alexandre Strube

Merge branch '2020' of...

parents cef89839 53b29ada
Showing with 691 additions and 9 deletions
import grp
import os
from easybuild.tools.run import run_cmd
from easybuild.tools.config import build_option
from easybuild.tools.config import install_path
from easybuild.tools.build_log import EasyBuildError, print_warning
from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME
from easybuild.toolchains.compiler.systemcompiler import TC_CONSTANT_SYSTEM
SUPPORTED_COMPILERS = ["GCC", "iccifort", "PGI", "GCCcore"]
SUPPORTED_MPIS = ["impi", "psmpi", "OpenMPI", "MVAPICH2"]
# Maintain toplevel list for easy use of --try-toolchain
SUPPORTED_TOPLEVEL_TOOLCHAIN_FAMILIES = [
"intel",
"intel-para",
"iomkl",
"gpsmkl",
"gomkl",
"pmvmklc",
"gmvmklc",
]
# Could potentially make a dictionary of names and supported versions here but that is
# probably overkill
SUPPORTED_TOOLCHAIN_FAMILIES = (
SUPPORTED_COMPILERS
+ ["gcccoremkl", "gpsmpi", "ipsmpi", "iimpi", "iompi", "gmvapich2c", "pmvapich2c"]
+ SUPPORTED_TOPLEVEL_TOOLCHAIN_FAMILIES
)
# Also maintain a list of CUDA enabled compilers
CUDA_ENABLED_TOOLCHAINS = ["pmvmklc", "gmvmklc", "gmvapich2c", "pmvapich2c"]
# Use this for a heuristic to see if the easyconfig comes from the Golden Repo
GOLDEN_REPO = "Golden_Repo"
# Some modules should use modaltsoftname by default
REQUIRE_MODALTSOFTNAME = {
"impi": "IntelMPI",
"psmpi": "ParaStationMPI",
"iccifort": "Intel",
}
def parse_hook(ec, *args, **kwargs):
"""Custom parse hook to manage installations intended for JSC systems."""
ec_dict = ec.asdict()
# Compilers are a family (in the Lmod sense)
if ec.name in SUPPORTED_COMPILERS:
key = "modluafooter"
value = 'family("compiler")'
if key in ec_dict:
if not value in ec_dict[key]:
ec[key] = "\n".join([ec[key], value])
else:
ec[key] = value
ec.log.info("[parse hook] Injecting Lmod compiler family")
# Supported compilers should also be recursively unloaded
key = "recursive_module_unload"
if not key in ec_dict or ec_dict[key] is None:
ec[key] = True
ec.log.info(
"[parse hook] Injecting recursive module unloading for supported compiler"
)
# MPIs are a family (in the Lmod sense)
if ec.name in SUPPORTED_MPIS:
key = "modluafooter"
value = 'family("mpi")'
if key in ec_dict:
if not value in ec_dict[key]:
ec[key] = "\n".join([ec[key], value])
else:
ec[key] = value
ec.log.info("[parse hook] Injecting Lmod mpi family")
# Check if we need to use 'modaltsoftname'
if ec.name in REQUIRE_MODALTSOFTNAME:
key = "modaltsoftname"
if not key in ec_dict or ec_dict[key] is None:
ec[key] = REQUIRE_MODALTSOFTNAME[ec.name]
ec.log.info(
"[parse hook] Injecting modaltsoftname '%s' for '%s'",
REQUIRE_MODALTSOFTNAME[ec.name],
ec.name,
)
# Check if CUDA is in the dependencies, if so add the GPU Lmod tag
if (
"CUDA" in [dep[0] for dep in iter(ec_dict["dependencies"])]
or ec_dict["toolchain"]["name"] in CUDA_ENABLED_TOOLCHAINS
):
key = "modluafooter"
value = 'add_property("arch","gpu")'
if key in ec_dict:
if not value in ec_dict[key]:
ec[key] = "\n".join([ec_dict[key], value])
else:
ec[key] = value
ec.log.info("[parse hook] Injecting gpu as Lmod arch property")
# Check where installations are going to go and add appropriate site contact
# not sure of a fool-proof way to do this, let's just try a heuristic
site_contacts = None
if "stages" in install_path().lower():
user_groups = [grp.getgrgid(g).gr_name for g in os.getgroups()]
if any(group in user_groups for group in ["swmanage", "software"]):
site_contacts = "sc@fz-juelich.de"
if site_contacts is None:
# Inject the user
site_contacts = os.getenv("USER")
# Tag the build as a user build
key = "modluafooter"
value = 'add_property("build","user")'
if key in ec_dict:
if not value in ec_dict[key]:
ec[key] = "\n".join([ec_dict[key], value])
else:
ec[key] = value
ec.log.info("[parse hook] Injecting user as Lmod build property")
if site_contacts:
key = "site_contacts"
value = site_contacts
if key in ec_dict:
if ec_dict[key] is not None and value not in ec_dict[key]:
value = ",".join([ec_dict[key], value])
ec[key] = value
ec.log.info("[parse hook] Injecting contact %s", value)
# If we are parsing we are not searching; in that case, if the easyconfig is
# located in the search path, raise an error since its dependencies will (most
# probably) not be resolved
if build_option("robot"):
search_paths = build_option("search_paths") or []
robot_paths = list(set(build_option("robot_path") + build_option("robot")))
if ec.path:
ec_dir_path = os.path.dirname(os.path.abspath(ec.path))
else:
ec_dir_path = ''
if any(search_path in ec_dir_path for search_path in search_paths) and not any(
robot_path in ec_dir_path for robot_path in robot_paths
):
raise EasyBuildError(
"\nYou are attempting to install an easyconfig distributed with "
"EasyBuild but are not properly configured to resolve dependencies "
"for this case. Please add additional options:\n"
" eb --robot=$EASYBUILD_ROBOT:$EBROOTEASYBUILD/easybuild/easyconfigs --try-update-deps ...."
)
def pre_ready_hook(self, *args, **kwargs):
"""When we are building something, do some checks for bad behaviour."""
ec = self.cfg
# Grab name, path, toolchain, install path and check if we are installing
# GCCcore/MPI
name = ec["name"]
path_to_ec = os.path.abspath(ec.path)
toolchain = ec["toolchain"]
is_gcccore = ec["name"] == "GCCcore"
is_mpi = ec["moduleclass"] == "mpi" or name in SUPPORTED_MPIS
# Don't let people use unsupported toolchains (by default)
override_toolchain_check = os.getenv("JSC_OVERRIDE_TOOLCHAIN_CHECK")
if not override_toolchain_check:
toolchain_name = toolchain["name"]
if not toolchain_name in SUPPORTED_TOOLCHAIN_FAMILIES:
stage = os.getenv("STAGE", default=None)
if stage:
# Clean things up if it is a Devel stage
stage = stage.replace("Devel-", "")
else:
stage = "<TOOLCHAIN_VERSION>"
print_warning(
"\nYou are attempting to install software with an unsupported "
"toolchain (%s), please use additional arguments to map this to a supported"
" toolchain:\n"
" eb --try-toolchain=<SUPPORTED_TOOLCHAIN>,%s --try-update-deps ...\n"
"where <SUPPORTED_TOOLCHAIN> comes from the list %s\n"
"(if you really know what you are doing, you can override this "
"behaviour by setting the %s environment variable)\n\n"
"...exiting",
toolchain_name,
stage,
SUPPORTED_TOPLEVEL_TOOLCHAIN_FAMILIES,
"JSC_OVERRIDE_TOOLCHAIN_CHECK",
)
exit(1)
# Don't let people install GCCcore since this probably won't work and will lead them
# to reinstall most of our stack. Don't advertise that this can be overridden, only
# experts should know that.
override_gcccore_check = os.getenv("JSC_OVERRIDE_GCCCORE_CHECK")
if not override_gcccore_check:
if is_gcccore and not "stages" in install_path().lower():
print_warning(
"\nYou are attempting to install GCCcore (%s) into a non-system "
"location (%s), this won't work as expected without additional effort "
"and is likely to lead to building a whole stack of dependencies even "
"for simple software. Please contact sc@fz-juelich.de if you wish to "
"discuss this further.\n\n"
"...exiting",
path_to_ec,
install_path(),
)
exit(1)
# Don't let people install a non-JSC MPI (and don't advertise that this can be
# overridden, only experts should know that)
override_mpi_check = os.getenv("JSC_OVERRIDE_MPI_CHECK")
if not override_mpi_check:
if is_mpi and GOLDEN_REPO not in path_to_ec:
print_warning(
"\nYou are attempting to install a non-system MPI implementation (%s), "
"this is very likely to lead to severe performance degradation. Please "
"contact sc@fz-juelich.de if you wish to discuss this further.\n\n"
"...exiting",
path_to_ec,
)
exit(1)
def end_hook(*args, **kwargs):
"""Refresh Lmod's cache."""
# If the user is part of the development group and the installation is systemwide,
# rebuild the system cache
if "stages" in install_path().lower():
user = os.getenv("USER")
if user == "swmanage":
cmd = (
"/gpfs/software/juwels/configs/update_system_module_cache.sh"
) # Need to make this generic
if os.path.isfile(cmd):
print("== Refreshing Lmod's cache...")
run_cmd(cmd, log_all=True)
else:
# Otherwise do nothing, no need to build a user cache, it's very unlikely they
# will have loads of modules
pass
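# ---------------------------------------------------------------------------
# Illustration only, not part of the hook logic above. EasyBuild loads a hooks
# file like this via its --hooks option (or the $EASYBUILD_HOOKS environment
# variable). The snippet below is a minimal, standalone sketch of the
# idempotent "inject into modluafooter" pattern that parse_hook repeats for
# the compiler/mpi families and the Lmod properties; the helper name is
# hypothetical and only meant to show the behaviour.
def _append_footer(current, value):
    """Return 'current' with 'value' appended on a new line, at most once."""
    if not current:
        return value
    if value in current:
        return current
    return "\n".join([current, value])


if __name__ == "__main__":
    # Injecting the same footer twice leaves a single occurrence.
    footer = _append_footer(None, 'family("compiler")')
    footer = _append_footer(footer, 'family("compiler")')
    assert footer == 'family("compiler")'
    # A pre-existing footer is preserved and extended.
    assert _append_footer('family("compiler")', 'add_property("arch","gpu")') == (
        'family("compiler")\nadd_property("arch","gpu")'
    )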
@@ -25,7 +25,7 @@ builddependencies = [
 ]
 dependencies = [
-    ('Java', '1.8', '', True),
+    ('Java', '1.8', '', SYSTEM),
     ('Python', '3.8.5'),
 ]
...
easyblock = 'ConfigureMake'
name = 'ARPACK-NG'
version = '3.7.0'
versionsuffix = '-nompi'
homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/'
description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems.
libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG.
In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set.
"""
examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES'
site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)'
toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'}
toolchainopts = {'opt': True, 'optarch': True, 'pic': True}
source_urls = ['https://github.com/opencollab/arpack-ng/archive/']
sources = ["%(version)s.tar.gz"]
checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6']
patches = [
'ARPACK-NG-%(version)s-install-arpack-examples_gpsmkl.patch'
]
builddependencies = [
('binutils', '2.34'),
('Autotools', '20200321'),
('pkg-config', '0.29.2')
]
# We hide it since it should be used just for Jupyter; the MPI version should be preferred for normal cases
hidden = True
preconfigopts = 'sh bootstrap &&'
configopts = '--with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"'
postinstallcmds = [
"cp -r EXAMPLES %(installdir)s/EXAMPLES",
]
sanity_check_paths = {
'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, ],
'dirs': []
}
modextravars = {
'ARPACK_ROOT': '%(installdir)s',
'ARPACK_LIB': '%(installdir)s/lib',
}
moduleclass = 'numlib'
easyblock = 'PackedBinary'
name = 'ant'
version = '1.10.9'
versionsuffix = '-Java-%(javaver)s'
homepage = 'https://ant.apache.org/'
description = """Apache Ant is a Java library and command-line tool whose mission is to drive processes described in
build files as targets and extension points dependent upon each other. The main known usage of Ant is the build of
Java applications."""
site_contacts = 'j.goebbert@fz-juelich.de'
toolchain = SYSTEM
source_urls = ['https://archive.apache.org/dist/ant/binaries/']
sources = ['apache-%(name)s-%(version)s-bin.tar.gz']
checksums = ['5f8a85ddee6effe79163aa54c7bef6b60305e37200dedc1986437fb5c072a9f3']
dependencies = [('Java', '15')]
sanity_check_paths = {
'files': ['bin/ant', 'lib/ant.jar'],
'dirs': [],
}
modextravars = {'ANT_HOME': '%(installdir)s'}
moduleclass = 'devel'
easyblock = 'Bundle'
name = 'basemap'
version = '1.2.2'
versionsuffix = '-Python-%(pyver)s'
homepage = 'http://matplotlib.org/basemap/'
description = """Basemap is a Python matplotlib toolkit for plotting data on maps.
This is the last version of Basemap. Please move development efforts over to Cartopy!
"""
site_contacts = 'a.strube@fz-juelich.de'
toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'}
dependencies = [
('Python', '3.8.5'),
('SciPy-Stack', '2020', versionsuffix),
('GEOS', '3.8.1', versionsuffix),
('PROJ', '7.1.0'),
]
# this is a bundle of Python packages
exts_defaultclass = 'PythonPackage'
exts_filter = ('python -c "import %(ext_name)s"', '')
exts_download_dep_fail = True
exts_list = [
('pyshp', '2.1.2', {
'source_urls': ['https://pypi.python.org/packages/source/p/pyshp'],
'modulename': 'shapefile',
}),
('pyproj', '2.6.1.post1', { # PyProj 3 needs PROJ 7.2.0, which was released after PROJ went into production
'source_urls': ['https://pypi.python.org/packages/source/p/pyproj'],
'prebuildopts': 'PROJ_DIR="$EBROOTPROJ" ',
'preinstallopts': 'PROJ_DIR="$EBROOTPROJ" ',
}),
('basemap', '1.2.2', {
'source_urls': ['https://github.com/matplotlib/basemap/archive/'],
'source_tmpl': 'v%(version)srel.tar.gz',
'prebuildopts': 'GEOS_DIR="$EBROOTGEOS" ',
'preinstallopts': 'GEOS_DIR="$EBROOTGEOS" ',
'modulename': 'mpl_toolkits.basemap',
}),
]
modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']}
moduleclass = 'vis'
easyblock = 'PythonBundle'
name = 'Cartopy'
version = '0.18.0'
versionsuffix = '-Python-%(pyver)s'
homepage = 'https://scitools.org.uk/cartopy/docs/latest/'
description = """Cartopy is a Python package designed to make drawing maps for data analysis and visualisation easy."""
site_contacts = 'a.strube@fz-juelich.de'
toolchain = {'name': 'gpsmpi', 'version': '2020'}
dependencies = [
('Python', '3.8.5'),
('Fiona', '1.8.16', versionsuffix),
('GDAL', '3.1.2', versionsuffix),
('GEOS', '3.8.1', versionsuffix),
('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')),
('pyproj', '2.6.1.post1', versionsuffix),
('Shapely', '1.7.1', versionsuffix),
('PROJ', '7.1.0'),
]
use_pip = True
sanity_pip_check = True
exts_default_options = {'source_urls': [PYPI_SOURCE]}
exts_list = [
('OWSLib', '0.20.0', {
'checksums': ['334988857b260c8cdf1f6698d07eab61839c51acb52ee10eed1275439200a40e'],
}),
('pyepsg', '0.4.0', {
'checksums': ['2d08fad1e7a8b47a90a4e43da485ba95705923425aefc4e2a3efa540dbd470d7'],
}),
('pykdtree', '1.3.1', {
'checksums': ['0d49d3bbfa0366dbe29176754ec86df75114a25525b530dcbbb75d3ac4c263e9'],
}),
('pyshp', '2.1.0', {
'modulename': 'shapefile',
'checksums': ['e65c7f24d372b97d0920b864bbeb78322bb37b83f2606e2a2212631d5d51e5c0'],
}),
(name, version, {
'checksums': ['7ffa317e8f8011e0d965a3ef1179e57a049f77019867ed677d49dcc5c0744434'],
}),
]
moduleclass = 'geo'
easyblock = 'CMakeMake'
name = 'cppcheck'
version = '2.2'
homepage = 'http://cppcheck.sourceforge.net/'
description = """Cppcheck is a static analysis tool for C/C++ code"""
site_contacts = 'a.strube@fz-juelich.de'
toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
source_urls = [SOURCEFORGE_SOURCE]
sources = ['%(name)s-%(version)s.tar.bz2']
dependencies = [
('binutils', '2.34'),
('Qt5', '5.14.2'),
('PCRE', '8.44'),
('CMake', '3.18.0'),
]
configopts = '-DUSE_Z3:BOOL=OFF'
sanity_check_paths = {
'files': ['bin/cppcheck'],
'dirs': [],
}
moduleclass = 'lang'
easyblock = 'Bundle'
name = 'distributed'
version = '2.30.1'
versionsuffix = '-Python-%(pyver)s'
homepage = 'https://distributed.readthedocs.io/'
description = """Dask.distributed is a lightweight library for distributed computing in Python.
It extends both the concurrent.futures and dask APIs to moderate sized clusters."""
toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'}
site_contacts = 'a.strube@fz-juelich.de'
# this is a bundle of Python packages
exts_defaultclass = 'PythonPackage'
dependencies = [
('Python', '3.8.5'),
('dask', '2.22.0', versionsuffix),
('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')),
]
exts_default_options = {
'source_urls': [PYPI_SOURCE],
'sanity_pip_check': True,
}
exts_list = [
('zict', '2.0.0', {
'source_urls': ['https://pypi.python.org/packages/source/z/zict'],
'source_tmpl': 'zict-%(version)s.tar.gz',
}),
('HeapDict', '1.0.1', {
'modulename': 'heapdict',
'source_urls': ['https://pypi.python.org/packages/source/H/HeapDict'],
}),
('tornado', '5.0.2', {
'source_urls': ['https://pypi.python.org/packages/source/t/tornado'],
}),
('tblib', '1.7.0', {
'source_urls': ['https://pypi.python.org/packages/source/t/tblib'],
}),
('msgpack', '1.0.0', {
'modulename': 'msgpack',
'source_urls': ['https://pypi.python.org/packages/source/m/msgpack'],
}),
('sortedcontainers', '2.3.0', {
'source_urls': ['https://pypi.python.org/packages/source/s/sortedcontainers'],
}),
(name, version, {
'source_urls': ['https://pypi.python.org/packages/source/d/distributed'],
}),
]
sanity_check_paths = {
'files': ['bin/dask-scheduler', 'bin/dask-ssh', 'bin/dask-worker'],
'dirs': ['lib/python%(pyshortver)s/site-packages'],
}
modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']}
moduleclass = 'tools'
name = 'FFTW'
version = '3.3.8'
versionsuffix = '-nompi'
homepage = 'http://www.fftw.org'
description = """FFTW is a C subroutine library for computing the discrete
Fourier transform (DFT) in one or more dimensions, of arbitrary input size,
and of both real and complex data."""
site_contacts = 'i.gutheil@fz-juelich.de'
toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
toolchainopts = {'pic': True}
source_urls = [homepage]
sources = [SOURCELOWER_TAR_GZ]
checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303']
builddependencies = [
('binutils', '2.34'),
]
# We hide it since it should be used just for Jupyter; the MPI version should be preferred for normal cases
hidden = True
# quad precision requires GCC v4.6 or higher, so it can be enabled with this toolchain
# see also
# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html
with_quad_prec = True
# --enable-avx-128-fma (FMA4) only breaks compilation with Intel compilers,
# which do not support FMA4 instructions; with GCC it is fine
use_fma4 = True
# can't find mpirun/mpiexec and fails
# runtest = 'check'
modextravars = {
'FFTW_ROOT': '%(installdir)s',
'FFTW_INCLUDE': '%(installdir)s/include',
'FFTW_LIB': '%(installdir)s/lib',
}
moduleclass = 'numlib'
name = 'FFTW'
version = '3.3.8'
homepage = 'http://www.fftw.org'
description = """FFTW is a C subroutine library for computing the discrete
Fourier transform (DFT) in one or more dimensions, of arbitrary input size,
and of both real and complex data."""
site_contacts = 'i.gutheil@fz-juelich.de'
toolchain = {'name': 'ipsmpi', 'version': '2020-mt'}
toolchainopts = {'pic': True}
source_urls = [homepage]
sources = [SOURCELOWER_TAR_GZ]
# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582
patches = ['fftw_no-gcc-intel_2020.patch']
checksums = [
'6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303',
'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6'
]
# no quad precision, requires GCC v4.6 or higher
# see also
# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html
with_quad_prec = False
# Intel compilers do not support FMA4 instructions
use_fma4 = False
# can't find mpirun/mpiexec and fails
# runtest = 'check'
modextravars = {
'FFTW_ROOT': '%(installdir)s',
'FFTW_INCLUDE': '%(installdir)s/include',
'FFTW_LIB': '%(installdir)s/lib',
}
moduleclass = 'numlib'
easyblock = 'PythonBundle'
name = 'Fiona'
version = '1.8.16'
versionsuffix = "-Python-%(pyver)s"
homepage = 'https://github.com/Toblerity/Fiona'
description = """Fiona is designed to be simple and dependable. It focuses on reading and writing data
in standard Python IO style and relies upon familiar Python types and protocols such as files, dictionaries,
mappings, and iterators instead of classes specific to OGR. Fiona can read and write real-world data using
multi-layered GIS formats and zipped virtual file systems and integrates readily with other Python GIS
packages such as pyproj, Rtree, and Shapely."""
site_contacts = 'a.strube@fz-juelich.de'
toolchain = {'name': 'gpsmpi', 'version': '2020'}
dependencies = [
('Python', '3.8.5'),
('GDAL', '3.1.2', versionsuffix),
('Shapely', '1.7.1', versionsuffix), # optional
]
use_pip = True
exts_default_options = {'source_urls': [PYPI_SOURCE]}
exts_list = [
('cligj', '0.5.0', {
'checksums': ['6c7d52d529a78712491974f975c33473f430c0f7beb18c0d7a402a743dcb460a'],
}),
('click-plugins', '1.1.1', {
'checksums': ['46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b'],
}),
('munch', '2.5.0', {
'checksums': ['2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2'],
}),
(name, version, {
'checksums': ['fd6dfb65959becc916e9f6928618bfd59c16cdbc413ece0fbac61489cd11255f'],
}),
]
sanity_pip_check = True
sanity_check_paths = {
'files': ['bin/fio'],
'dirs': ['lib/python%(pyshortver)s/site-packages'],
}
moduleclass = 'data'
@@ -35,7 +35,7 @@ dependencies = [
     ('HDF5', '1.10.6'),
     ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')),
     ('libvdwxc', '0.4.0'),
-    ('GPAW-setups', '0.9.20000', '', True),
+    ('GPAW-setups', '0.9.20000', '', SYSTEM),
 ]
 preconfigopts = 'export MKL_ROOT=$MKLROOT && '
...
@@ -48,7 +48,7 @@ builddependencies = [
 # Removed ('hwloc', '2.0.3') from dependencies due to runtime failures.
 dependencies = [
-    ('CUDA', '11.0', '', True),
+    ('CUDA', '11.0', '', SYSTEM),
 ]
 configopts = '-DCMAKE_PREFIX_PATH=$EBROOTHWLOC -DMPIEXEC_MAX_NUMPROCS="24" '
...
@@ -47,7 +47,7 @@ builddependencies = [
 # Removed ('hwloc', '2.0.3') from dependencies due to runtime failures.
 dependencies = [
-    ('CUDA', '11.0', '', True),
+    ('CUDA', '11.0', '', SYSTEM),
 ]
 configopts = '-DCMAKE_PREFIX_PATH=$EBROOTHWLOC -DMPIEXEC_MAX_NUMPROCS="24"'
...
easyblock = 'ConfigureMake'
name = 'GSL'
version = '2.6'
homepage = 'http://www.gnu.org/software/gsl/'
description = """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers.
The library provides a wide range of mathematical routines such as random number generators, special functions
and least-squares fitting.
"""
site_contacts = 'a.kreuzer@fz-juelich.de'
toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
toolchainopts = {'opt': True, 'optarch': True, 'unroll': True, 'pic': True}
builddependencies = [('binutils', '2.34')]
source_urls = [GNU_SOURCE]
sources = [SOURCELOWER_TAR_GZ]
configopts = "--with-pic"
moduleclass = 'numlib'
@@ -37,7 +37,7 @@ dependencies = [
     ('Ghostscript', '9.52'),
     ('GLib', '2.64.4'),
     ('GTS', '0.7.6'),
-    ('Java', '1.8', '', True),
+    ('Java', '1.8', '', SYSTEM),
     ('libpng', '1.6.37'),
     ('librsvg', '2.48.8'),
     ('Pango', '1.44.7'),
...
@@ -19,9 +19,9 @@ local_comp = (local_comp_name, local_comp_version)
 dependencies = [
     local_comp,
     ('binutils', '2.34', '', local_comp),
-    ('imkl', local_mklver, '', True),
+    ('imkl', local_mklver, '', SYSTEM),
 ]
-hiddendependencies = [('imkl', local_mklver, '', True)]
+hiddendependencies = [('imkl', local_mklver, '', SYSTEM)]
 moduleclass = 'toolchain'
@@ -16,6 +16,7 @@ toolchainopts = {'opt': True, 'pic': True}
 builddependencies = [
     ('flex', '2.6.4'),
     ('Bison', '3.6.4'),
+    ('Java', '15', '', SYSTEM),
 ]
 dependencies = [
@@ -30,7 +31,8 @@ source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.s
 preconfigopts = 'export CPATH="/usr/include/tirpc:$CPATH" && '
 preconfigopts += 'export LIBS="-ltirpc $LIBS" && '
-configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --includedir=%(installdir)s/include/%(namelower)s'
+configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java '
+configopts += '--includedir=%(installdir)s/include/%(namelower)s '
 prebuildopts = 'export CPATH="/usr/include/tirpc:$CPATH" && '
...
@@ -16,6 +16,7 @@ toolchainopts = {'opt': True, 'pic': True}
 builddependencies = [
     ('flex', '2.6.4'),
     ('Bison', '3.6.4'),
+    ('Java', '15', '', SYSTEM),
 ]
 dependencies = [
@@ -30,7 +31,8 @@ source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.s
 preconfigopts = 'export CPATH="/usr/include/tirpc:$CPATH" && '
 preconfigopts += 'export LIBS="-ltirpc $LIBS" && '
-configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --includedir=%(installdir)s/include/%(namelower)s'
+configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java '
+configopts += '--includedir=%(installdir)s/include/%(namelower)s '
 prebuildopts = 'export CPATH="/usr/include/tirpc:$CPATH" && '
...
@@ -20,6 +20,7 @@ checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa']
 builddependencies = [
     ('binutils', '2.34'),
+    ('Java', '15', '', SYSTEM)
 ]
 dependencies = [
@@ -27,4 +28,6 @@ dependencies = [
     ('Szip', '2.1.1'),
 ]
+configopts = '--enable-java'
 moduleclass = 'data'